[ 553.901352] env[68798]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68798) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 553.901699] env[68798]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68798) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 553.901699] env[68798]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68798) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 553.902091] env[68798]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 553.993190] env[68798]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68798) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 554.002985] env[68798]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68798) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 554.265375] env[68798]: INFO nova.virt.driver [None req-5dac0aae-5cd4-4db6-99da-a2533ed80026 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 554.348835] env[68798]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 554.349094] env[68798]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 554.349264] env[68798]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68798) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 557.579469] env[68798]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-a117989b-dfd6-4fa1-bfeb-6b1dcaa2a89f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.595413] env[68798]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68798) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 557.595577] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-3eba29ad-e2cc-4a4f-ab4f-a511adc81515 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.621219] env[68798]: INFO oslo_vmware.api [-] Successfully established new session; session ID is fddbd.
[ 557.621394] env[68798]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.272s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 557.621986] env[68798]: INFO nova.virt.vmwareapi.driver [None req-5dac0aae-5cd4-4db6-99da-a2533ed80026 None None] VMware vCenter version: 7.0.3
[ 557.625441] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf435b9-a5a8-4eae-a0ee-b8c0e0738c14 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.643194] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16a1f3a-5c9a-42bc-86a4-83b3968996cf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.649368] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a15c6c-2313-4fc2-bdf1-f697220079e8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.656369] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f4a712-53ac-4d2c-9f90-7afb1b4c4f05 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.669487] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52c76ee-c618-4732-a821-4fe2d3df6d1b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.675696] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772753e4-d057-4d76-8aff-0b0bebf7530c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.706233] env[68798]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-16bf8a41-e605-47df-983a-8507a8b45ac0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 557.712374] env[68798]: DEBUG nova.virt.vmwareapi.driver [None req-5dac0aae-5cd4-4db6-99da-a2533ed80026 None None] Extension org.openstack.compute already exists. {{(pid=68798) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 557.715108] env[68798]: INFO nova.compute.provider_config [None req-5dac0aae-5cd4-4db6-99da-a2533ed80026 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 557.735975] env[68798]: DEBUG nova.context [None req-5dac0aae-5cd4-4db6-99da-a2533ed80026 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b724af3f-72c5-4938-9b37-de0ce10f4a41(cell1) {{(pid=68798) load_cells /opt/stack/nova/nova/context.py:464}}
[ 557.738338] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 557.738558] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 557.739379] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 557.739902] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Acquiring lock "b724af3f-72c5-4938-9b37-de0ce10f4a41" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 557.740103] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Lock "b724af3f-72c5-4938-9b37-de0ce10f4a41" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 557.741129] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Lock "b724af3f-72c5-4938-9b37-de0ce10f4a41" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 557.762561] env[68798]: INFO dbcounter [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Registered counter for database nova_cell0
[ 557.770798] env[68798]: INFO dbcounter [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Registered counter for database nova_cell1
[ 557.774439] env[68798]: DEBUG oslo_db.sqlalchemy.engines [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68798) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 557.774821] env[68798]: DEBUG oslo_db.sqlalchemy.engines [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68798) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 557.779702] env[68798]: DEBUG dbcounter [-] [68798] Writer thread running {{(pid=68798) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 557.780604] env[68798]: DEBUG dbcounter [-] [68798] Writer thread running {{(pid=68798) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 557.783133] env[68798]: ERROR nova.db.main.api [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 557.783133] env[68798]: result = function(*args, **kwargs)
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 557.783133] env[68798]: return func(*args, **kwargs)
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 557.783133] env[68798]: result = fn(*args, **kwargs)
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 557.783133] env[68798]: return f(*args, **kwargs)
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 557.783133] env[68798]: return db.service_get_minimum_version(context, binaries)
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 557.783133] env[68798]: _check_db_access()
[ 557.783133] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 557.783133] env[68798]: stacktrace = ''.join(traceback.format_stack())
[ 557.783133] env[68798]:
[ 557.783924] env[68798]: ERROR nova.db.main.api [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 557.783924] env[68798]: result = function(*args, **kwargs)
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 557.783924] env[68798]: return func(*args, **kwargs)
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 557.783924] env[68798]: result = fn(*args, **kwargs)
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 557.783924] env[68798]: return f(*args, **kwargs)
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 557.783924] env[68798]: return db.service_get_minimum_version(context, binaries)
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 557.783924] env[68798]: _check_db_access()
[ 557.783924] env[68798]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 557.783924] env[68798]: stacktrace = ''.join(traceback.format_stack())
[ 557.783924] env[68798]:
[ 557.784354] env[68798]: WARNING nova.objects.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Failed to get minimum service version for cell b724af3f-72c5-4938-9b37-de0ce10f4a41
[ 557.784466] env[68798]: WARNING nova.objects.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 557.784898] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Acquiring lock "singleton_lock" {{(pid=68798) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.785075] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Acquired lock "singleton_lock" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.785330] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Releasing lock "singleton_lock" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 557.785653] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Full set of CONF: {{(pid=68798) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 557.785799] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ******************************************************************************** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 557.785929] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] Configuration options gathered from: {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 557.786080] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 557.786277] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 557.786407] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ================================================================================ {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 557.786622] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] allow_resize_to_same_host = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.786794] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] arq_binding_timeout = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.786930] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] backdoor_port = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787071] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] backdoor_socket = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787242] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] block_device_allocate_retries = 60 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787406] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] block_device_allocate_retries_interval = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787576] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cert = self.pem {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787742] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.787910] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute_monitors = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788089] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] config_dir = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788264] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] config_drive_format = iso9660 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788400] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788564] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] config_source = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788733] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] console_host = devstack {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.788896] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] control_exchange = nova {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789066] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cpu_allocation_ratio = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789230] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] daemon = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789394] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] debug = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789550] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_access_ip_network_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789727] 
env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_availability_zone = nova {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.789887] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_ephemeral_format = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790059] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_green_pool_size = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790299] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790464] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] default_schedule_zone = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790623] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] disk_allocation_ratio = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790785] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] enable_new_services = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.790963] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] enabled_apis = ['osapi_compute'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791142] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] enabled_ssl_apis = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791309] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] flat_injected = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791470] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] force_config_drive = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791628] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] force_raw_images = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791820] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 
None None] graceful_shutdown_timeout = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.791993] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] heal_instance_info_cache_interval = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.792229] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] host = cpu-1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.792406] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.792569] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.792729] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.792955] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793128] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_build_timeout = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793293] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_delete_interval = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793464] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_format = [instance: %(uuid)s] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793632] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_name_template = instance-%08x {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793797] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_usage_audit = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.793972] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_usage_audit_period = month {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.794174] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.794354] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.794523] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] internal_service_availability_zone = internal {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.794682] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] key = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.794844] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] live_migration_retry_count = 30 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795022] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_config_append = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795194] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795357] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_dir = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795515] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795644] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_options = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_rotate_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.795976] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_rotate_interval_type = days {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796168] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] log_rotation_type = none {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796297] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796426] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796595] env[68798]: DEBUG 
oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796764] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.796895] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797067] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] long_rpc_timeout = 1800 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797231] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_concurrent_builds = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797393] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_concurrent_live_migrations = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797554] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_concurrent_snapshots = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797713] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_local_block_devices = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.797878] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_logfile_count = 30 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798041] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] max_logfile_size_mb = 200 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798205] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] maximum_instance_delete_attempts = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798374] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metadata_listen = 0.0.0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798541] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metadata_listen_port = 8775 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798709] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metadata_workers = 2 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.798869] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] migrate_max_retries = -1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.799049] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] mkisofs_cmd = genisoimage {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] my_ip = 10.180.1.21 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] network_allocate_retries = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800106] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] osapi_compute_listen_port = 8774 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800352] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] osapi_compute_unique_server_name_scope = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800385] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] osapi_compute_workers = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800534] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] password_length = 12 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800702] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] periodic_enable = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.800865] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] periodic_fuzzy_delay = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801049] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] pointer_model = usbtablet {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801224] env[68798]: 
DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] preallocate_images = none {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801386] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] publish_errors = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801517] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] pybasedir = /opt/stack/nova {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801674] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ram_allocation_ratio = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.801871] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rate_limit_burst = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802074] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rate_limit_except_level = CRITICAL {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802241] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rate_limit_interval = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802405] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reboot_timeout = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802566] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reclaim_instance_interval = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802727] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] record = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.802929] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reimage_timeout_per_gb = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803117] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] report_interval = 120 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803283] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rescue_timeout = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803443] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reserved_host_cpus = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803603] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reserved_host_disk_mb = 0 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803763] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reserved_host_memory_mb = 512 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.803924] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] reserved_huge_pages = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804112] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] resize_confirm_window = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804297] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] resize_fs_using_block_device = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804462] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] resume_guests_state_on_host_boot = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804632] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804794] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] rpc_response_timeout = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.804956] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] run_external_periodic_tasks = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805139] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] running_deleted_instance_action = reap {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805300] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805460] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] running_deleted_instance_timeout = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805619] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler_instance_sync_interval = 120 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805786] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_down_time = 720 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.805955] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] servicegroup_driver = db {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.806127] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] shelved_offload_time = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.806291] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] shelved_poll_interval = 3600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.806456] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] shutdown_timeout = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.806618] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] source_is_ipv6 = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.806777] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ssl_only = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807041] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807216] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] sync_power_state_interval = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807379] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] sync_power_state_pool_size = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807548] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] syslog_log_facility = LOG_USER {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807707] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] tempdir = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.807870] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] timeout_nbd = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808049] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] transport_url = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808216] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] update_resources_interval = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808382] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_cow_images = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808542] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 
None None] use_eventlog = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808701] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_journal = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.808859] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_json = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809031] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_rootwrap_daemon = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809195] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_stderr = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809355] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] use_syslog = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809514] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vcpu_pin_set = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809681] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plugging_is_fatal = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.809848] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plugging_timeout = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.810033] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] virt_mkfs = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.810201] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] volume_usage_poll_interval = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.810362] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] watch_log_file = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.810531] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] web = /usr/share/spice-html5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 557.810714] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_concurrency.disable_process_locking = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811028] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811213] 
env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811382] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811555] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811725] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.811918] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.812120] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.auth_strategy = keystone {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.812297] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.compute_link_prefix = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.812480] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.812660] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.dhcp_domain = novalocal {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.812851] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.enable_instance_password = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813039] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.glance_link_prefix = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813211] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813384] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813551] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
api.instance_list_per_project_cells = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813716] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.list_records_by_skipping_down_cells = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.813881] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.local_metadata_per_cell = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814066] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.max_limit = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814266] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.metadata_cache_expiration = 15 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814448] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.neutron_default_tenant_id = default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814615] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.use_neutron_default_nets = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814789] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.814954] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.815135] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.815309] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.815478] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_dynamic_targets = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.815647] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_jsonfile_path = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.815829] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816033] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 
None None] cache.backend = dogpile.cache.memcached {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816205] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.backend_argument = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816380] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.config_prefix = cache.oslo {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816549] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.dead_timeout = 60.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816713] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.debug_cache_backend = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.816877] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.enable_retry_client = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817050] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.enable_socket_keepalive = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817226] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.enabled = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817386] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.enforce_fips_mode = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817547] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.expiration_time = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817707] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.hashclient_retry_attempts = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.817869] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818042] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_dead_retry = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818205] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_password = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818372] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68798) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818537] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818700] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_pool_maxsize = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.818865] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819040] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_sasl_enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819228] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819394] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819554] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.memcache_username = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819721] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.proxies = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.819881] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_password = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820067] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820248] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820418] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_server = localhost:6379 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820579] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_socket_timeout = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820738] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.redis_username = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.820899] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.retry_attempts = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821076] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.retry_delay = 0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821240] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.socket_keepalive_count = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821404] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.socket_keepalive_idle = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821563] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.socket_keepalive_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821720] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.tls_allowed_ciphers = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.821908] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.tls_cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822091] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.tls_certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822264] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.tls_enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822425] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cache.tls_keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822597] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822789] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.auth_type = password {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.822967] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823160] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823324] env[68798]: DEBUG oslo_service.service 
[None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823491] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823656] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.cross_az_attach = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823820] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.debug = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.823985] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.endpoint_template = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.824190] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.http_retries = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.824366] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.824530] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.824703] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.os_region_name = RegionOne {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.824869] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825040] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cinder.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825217] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825379] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.cpu_dedicated_set = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825538] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.cpu_shared_set = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825704] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.image_type_exclude_list = [] {{(pid=68798) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.825868] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826040] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826205] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826368] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826538] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826700] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.resource_provider_association_refresh = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.826861] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827037] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.shutdown_retry_interval = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827223] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827404] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] conductor.workers = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827581] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] console.allowed_origins = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827742] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] console.ssl_ciphers = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.827914] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] console.ssl_minimum_version = default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828100] env[68798]: DEBUG oslo_service.service [None 
req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] consoleauth.enforce_session_timeout = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828296] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] consoleauth.token_ttl = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828479] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828641] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.828970] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829143] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829306] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829496] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829629] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829790] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.829953] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830127] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830289] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830446] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.service_name = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830616] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.service_type = accelerator {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830781] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.830940] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.831114] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] cyborg.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.backend = sqlalchemy {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.connection = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835353] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.connection_debug = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.connection_parameters = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.connection_recycle_time = 3600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.connection_trace = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.db_inc_retry_interval = True {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.db_max_retries = 20 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.db_max_retry_interval = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.db_retry_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.max_overflow = 50 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.max_pool_size = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.max_retries = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.835912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.mysql_wsrep_sync_wait = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.pool_timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.retry_interval = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.slave_connection = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.sqlite_synchronous = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] database.use_db_reconnect = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.backend = sqlalchemy {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.connection = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.connection_debug = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.connection_parameters = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.connection_recycle_time = 3600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.connection_trace = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836335] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.db_inc_retry_interval = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836535] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.db_max_retries = 20 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836535] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.db_max_retry_interval = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836592] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.db_retry_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836758] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.max_overflow = 50 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.836924] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.max_pool_size = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837107] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.max_retries = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837282] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837444] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837604] 
env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.pool_timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837766] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.retry_interval = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.837929] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.slave_connection = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838100] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] api_database.sqlite_synchronous = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838275] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] devices.enabled_mdev_types = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838455] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838626] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838791] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ephemeral_storage_encryption.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.838959] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839143] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.api_servers = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839312] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839477] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839648] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839809] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.839971] env[68798]: DEBUG 
oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.840160] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.debug = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.840350] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.default_trusted_certificate_ids = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.840527] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.enable_certificate_validation = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.840692] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.enable_rbd_download = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.840854] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841033] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841207] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841369] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841533] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841696] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.num_retries = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.841894] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.rbd_ceph_conf = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842080] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.rbd_connect_timeout = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842256] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.rbd_pool = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842425] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.rbd_user = {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842589] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842761] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.842939] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843123] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.service_type = image {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843287] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843448] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843604] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843761] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.843944] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.844145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.verify_glance_signatures = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.844323] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] glance.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.844493] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] guestfs.debug = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.844668] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] mks.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845040] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845237] 
env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.manager_interval = 2400 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845411] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.precache_concurrency = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845584] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.remove_unused_base_images = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845756] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.845927] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846122] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] image_cache.subdirectory_name = _base {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846298] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.api_max_retries = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846465] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.api_retry_interval = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846626] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846789] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.auth_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.846952] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847125] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847291] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847457] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.conductor_group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847618] env[68798]: DEBUG 
oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847776] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.847935] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848109] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848269] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848429] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848586] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848749] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.peer_list = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.848908] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849078] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849244] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.serial_console_state_timeout = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849401] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849569] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.service_type = baremetal {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849729] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.shard = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.849892] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.split_loggers = False {{(pid=68798) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850074] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850236] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850395] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850573] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850734] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ironic.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.850914] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851102] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] key_manager.fixed_key = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851287] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851448] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.barbican_api_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851609] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.barbican_endpoint = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851793] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.barbican_endpoint_type = public {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.851970] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.barbican_region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852145] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852306] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.certfile = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852470] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852630] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852808] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.852987] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.number_of_retries = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853166] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.retry_delay = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853329] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.send_service_user_token = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853491] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853649] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.verify_ssl = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.853965] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican.verify_ssl_path = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.854167] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.854349] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.auth_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.854510] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.854667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
557.854829] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.854989] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855160] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855321] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855477] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] barbican_service_user.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855644] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.approle_role_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855803] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.approle_secret_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.855963] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856129] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856291] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856449] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856603] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856770] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.kv_mountpoint = secret {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.856930] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.kv_path = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857104] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None 
None] vault.kv_version = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857268] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.namespace = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857436] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.root_token_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857598] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857756] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.ssl_ca_crt_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.857914] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858086] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.use_ssl = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858265] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858434] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858599] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.auth_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858759] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.858921] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.859093] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.859260] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.859420] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
557.859580] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.859741] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.859897] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860077] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860235] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860391] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860547] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860704] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.860872] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.service_type = identity {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861045] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861207] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861365] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861523] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861703] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.861892] env[68798]: DEBUG oslo_service.service [None 
req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] keystone.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.862121] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.connection_uri = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.862291] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_mode = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.862461] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.862634] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_models = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.862825] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_power_governor_high = performance {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863021] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863191] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_power_management = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863364] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863529] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.device_detach_attempts = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863692] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.device_detach_timeout = 20 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.863858] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.disk_cachemodes = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864030] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.disk_prefix = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864228] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.enabled_perf_events = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864404] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
libvirt.file_backed_memory = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864569] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.gid_maps = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864731] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.hw_disk_discard = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.864893] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.hw_machine_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865082] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_rbd_ceph_conf = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865257] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865449] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865641] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_rbd_glance_store_name = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865818] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_rbd_pool = rbd {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.865990] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_type = default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866165] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.images_volume_group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866331] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.inject_key = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866490] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.inject_partition = -2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866651] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.inject_password = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866818] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.iscsi_iface = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.866985] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.iser_use_multipath = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867164] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867330] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867498] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_downtime = 500 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867658] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867822] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.867987] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_inbound_addr = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.868164] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.868324] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.868490] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_scheme = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.868666] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_timeout_action = abort {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.868836] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_tunnelled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869006] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.live_migration_uri = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869185] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869355] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.max_queues = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869521] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869760] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.869929] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.nfs_mount_options = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.870256] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.870433] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.870602] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.870765] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.870933] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.871118] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_pcie_ports = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.871289] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.871455] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.pmem_namespaces = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.871618] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.quobyte_client_cfg = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.871931] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.872223] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.872400] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.872567] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.872732] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rbd_secret_uuid = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.872918] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rbd_user = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873105] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873282] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873445] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rescue_image_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873606] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rescue_kernel_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873767] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rescue_ramdisk_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.873941] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.874133] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.rx_queue_size = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.874324] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.smbfs_mount_options = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.874603] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.874777] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.snapshot_compression = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.874942] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.snapshot_image_format = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.875175] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.875344] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.sparse_logical_volumes = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.875510] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.swtpm_enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.875682] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.swtpm_group = tss {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.875849] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.swtpm_user = tss {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876031] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.sysinfo_serial = unique {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876198] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.tb_cache_size = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876358] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.tx_queue_size = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876521] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.uid_maps = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876683] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.use_virtio_for_bridges = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.876854] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.virt_type = kvm {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877032] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.volume_clear = zero 
{{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877201] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.volume_clear_size = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877368] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.volume_use_multipath = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877540] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_cache_path = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877710] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.877879] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.878056] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.878230] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.878505] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.878687] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.vzstorage_mount_user = stack {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.878853] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879041] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879221] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.auth_type = password {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879384] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879549] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.certfile = None 
{{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879715] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.879878] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880055] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880235] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.default_floating_pool = public {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880397] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880559] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.extension_sync_interval = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880720] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.http_retries = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.880884] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881055] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881220] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881394] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881557] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881725] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.ovs_bridge = br-int {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.881923] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.physnets = [] {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882117] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.region_name = RegionOne {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882283] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882456] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.service_metadata_proxy = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882618] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.service_type = network {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.882990] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.883165] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.883324] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.883481] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.883664] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.883826] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] neutron.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884006] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] notifications.bdms_in_notifications = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884217] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] notifications.default_level = INFO {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884404] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] notifications.notification_format = unversioned {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884570] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] notifications.notify_on_state_change = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884745] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.884922] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] pci.alias = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885103] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] pci.device_spec = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885273] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] pci.report_in_placement = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885449] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885624] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.auth_type = password {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885793] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.885962] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886133] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886299] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886459] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886619] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886781] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.default_domain_id = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.886942] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.default_domain_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887134] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.domain_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887302] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.domain_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887476] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887626] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887786] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.887951] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.888117] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.888288] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.password = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.888483] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.project_domain_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.888654] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.project_domain_name = Default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.888834] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.project_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889023] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.project_name = service {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889200] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.region_name = RegionOne {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889364] 
env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889527] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889696] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.service_type = placement {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.889861] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890037] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890202] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890365] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.system_scope = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890522] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890704] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.trust_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.890899] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.user_domain_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891087] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.user_domain_name = Default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891254] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.user_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891430] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.username = placement {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891612] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891790] env[68798]: DEBUG oslo_service.service [None 
req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] placement.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.891993] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.cores = 20 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.892200] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.count_usage_from_placement = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.892455] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.892651] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.injected_file_content_bytes = 10240 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.892871] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.injected_file_path_length = 255 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893088] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.injected_files = 5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893268] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.instances = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893439] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.key_pairs = 100 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893608] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.metadata_items = 128 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893773] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.ram = 51200 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.893942] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.recheck_quota = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894124] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.server_group_members = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894293] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] quota.server_groups = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894468] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894634] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894800] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.image_metadata_prefilter = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.894962] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895137] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.max_attempts = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895303] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.max_placement_results = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895467] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895628] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895791] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.895994] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] scheduler.workers = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.896212] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.896389] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.896573] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.896747] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.896914] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897094] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897264] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897451] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897619] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.host_subset_size = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897784] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.897946] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898122] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898292] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.isolated_hosts = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898458] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.isolated_images = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898624] env[68798]: DEBUG oslo_service.service [None 
req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898787] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.898958] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899136] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.pci_in_placement = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899301] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899462] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899624] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899795] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.899973] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900153] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900314] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.track_instance_changes = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900486] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900655] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metrics.required = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900818] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metrics.weight_multiplier = 1.0 
{{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.900981] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.901159] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] metrics.weight_setting = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.901487] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.901667] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.901877] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.port_range = 10000:20000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902078] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902266] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902440] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] serial_console.serialproxy_port = 6083 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902609] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.auth_type = password {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.902989] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903168] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903332] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903496] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.insecure = False {{(pid=68798) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903655] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903827] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.send_service_user_token = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.903995] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.904193] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] service_user.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.904389] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.agent_enabled = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.904557] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.904882] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905097] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905274] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.html5proxy_port = 6082 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905440] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.image_compression = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905607] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.jpeg_compression = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905771] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.playback_compression = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.905943] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.server_listen = 127.0.0.1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906125] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906292] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.streaming_mode = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906452] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] spice.zlib_compression = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906619] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] upgrade_levels.baseapi = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906789] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] upgrade_levels.compute = auto {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.906952] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] upgrade_levels.conductor = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907124] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] upgrade_levels.scheduler = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907295] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907781] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907781] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907781] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.907911] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908128] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908328] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908500] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908660] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vendordata_dynamic_auth.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908832] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.api_retry_count = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.908997] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.ca_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.909184] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.909354] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.cluster_name = testcl1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.909520] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.connection_pool_size = 10 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.909677] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.console_delay_seconds = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.909856] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.datastore_regex = ^datastore.* {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910086] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910262] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.host_password = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910431] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.host_port = 443 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910600] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.host_username = administrator@vsphere.local {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910773] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.insecure = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.910940] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.integration_bridge = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911119] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.maximum_objects = 100 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911284] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.pbm_default_policy = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911446] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.pbm_enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911605] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.pbm_wsdl_location = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911795] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.911972] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.serial_port_proxy_uri = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.912149] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.serial_port_service_uri = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.912322] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.task_poll_interval = 0.5 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.912496] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.use_linked_clone = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.912668] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.vnc_keymap = en-us {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.912865] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.vnc_port = 5900 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.913047] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vmware.vnc_port_total = 10000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.913241] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.auth_schemes = ['none'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.913418] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.913740] env[68798]: 
DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.913931] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.914146] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.novncproxy_port = 6080 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.914352] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.server_listen = 127.0.0.1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.914529] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.914695] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.vencrypt_ca_certs = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.914861] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.vencrypt_client_cert = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915038] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vnc.vencrypt_client_key = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915226] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915395] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_deep_image_inspection = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915555] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915718] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.915879] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916052] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.disable_rootwrap = False {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916219] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.enable_numa_live_migration = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916379] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916538] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916698] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.916857] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.libvirt_disable_apic = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917029] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917198] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917360] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917518] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917677] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917838] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.917999] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.918171] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
557.918331] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.918495] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.918679] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.918845] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.client_socket_timeout = 900 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919024] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.default_pool_size = 1000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919190] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.keep_alive = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919355] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.max_header_line = 16384 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919516] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919675] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.ssl_ca_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.919877] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.ssl_cert_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.920072] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.ssl_key_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.920247] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.tcp_keepidle = 600 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.920427] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.920595] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] zvm.ca_file = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.920758] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] zvm.cloud_connector_url = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.921084] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.921267] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] zvm.reachable_timeout = 300 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.921447] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.enforce_new_defaults = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.921620] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.enforce_scope = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.921841] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.policy_default_rule = default {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922053] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922239] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.policy_file = policy.yaml {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922416] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922580] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922778] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.922933] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923115] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923289] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923465] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923642] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.connection_string = messaging:// {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923808] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.enabled = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.923982] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.es_doc_type = notification {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.924186] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.es_scroll_size = 10000 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.924368] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.es_scroll_time = 2m {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.924532] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.filter_error_trace = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.924702] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.hmac_keys = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.924873] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.sentinel_service_name = mymaster {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925057] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.socket_timeout = 0.1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925229] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.trace_requests = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925395] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler.trace_sqlalchemy = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925583] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler_jaeger.process_tags = {} {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925747] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.925913] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] profiler_otlp.service_name_prefix = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926093] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] remote_debug.host = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926258] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] remote_debug.port = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926439] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926601] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926764] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.926929] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927097] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927259] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927419] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927579] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927741] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.927913] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928085] env[68798]: 
DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928256] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928423] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928591] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928759] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.928926] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.929096] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.929271] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.929432] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.929590] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.929759] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930016] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930245] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930440] env[68798]: DEBUG oslo_service.service [None 
req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930606] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930771] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.930937] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.931115] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.931286] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.931454] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.931627] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.931829] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932014] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932189] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932359] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932524] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932712] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.932881] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_notifications.retry = -1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933078] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933256] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933430] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.auth_section = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933593] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.auth_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933753] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.cafile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.933912] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.certfile = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934086] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.collect_timing = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934248] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.connect_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934406] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.connect_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934565] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.endpoint_id = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934722] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.endpoint_override = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.934884] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.insecure = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935057] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.keyfile = None {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935223] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.max_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935380] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.min_version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935536] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.region_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935693] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.retriable_status_codes = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.935883] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.service_name = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936019] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.service_type = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936181] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.split_loggers = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936341] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.status_code_retries = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936498] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.status_code_retry_delay = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936659] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.timeout = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936815] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.valid_interfaces = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.936975] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_limit.version = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937154] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_reports.file_event_handler = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937318] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68798) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937476] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] oslo_reports.log_dir = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937646] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937807] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.937970] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938152] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938319] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938479] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938648] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938810] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.938971] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.939147] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.939311] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.939475] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] vif_plug_ovs_privileged.user = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.939642] env[68798]: DEBUG oslo_service.service 
[None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.939848] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940044] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940226] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940402] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940571] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940739] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.940907] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.941102] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.941280] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.isolate_vif = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.941455] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.941620] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.941816] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.942042] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
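Illustrative aside (not part of the captured log): the long run of "group.option = value" lines above and below is produced by oslo.config's ConfigOpts.log_opt_values(), the same cfg.py:2620 call site each entry references. A minimal standalone sketch of that mechanism follows, registering a few of the os_vif_ovs options with the defaults shown in the dump; the tiny program itself is an assumption for illustration, not Nova's startup code.

    import logging

    from oslo_config import cfg

    # Options mirrored from the os_vif_ovs values dumped in the log above.
    os_vif_ovs_opts = [
        cfg.IntOpt('network_device_mtu', default=1500),
        cfg.IntOpt('ovs_vsctl_timeout', default=120),
        cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
        cfg.StrOpt('ovsdb_interface', default='native'),
    ]

    CONF = cfg.ConfigOpts()
    CONF.register_opts(os_vif_ovs_opts, group='os_vif_ovs')

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('oslo_service.service')

    CONF([])  # parse an (empty) command line so the options become usable
    # Emits one "os_vif_ovs.<option> = <value>" DEBUG line per registered
    # option, which is exactly the shape of the dump in this log.
    CONF.log_opt_values(LOG, logging.DEBUG)
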
[ 557.942265] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_vif_ovs.per_port_bridge = False {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.942465] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_brick.lock_path = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.942637] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.942804] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.942982] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.capabilities = [21] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943157] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943319] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.helper_command = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943485] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943649] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943809] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] privsep_osbrick.user = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.943981] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.944156] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.group = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.944314] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.helper_command = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.944477] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
557.944639] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.944797] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] nova_sys_admin.user = None {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 557.944932] env[68798]: DEBUG oslo_service.service [None req-b8598837-dc40-4e03-a514-7de26ef95c09 None None] ******************************************************************************** {{(pid=68798) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 557.945812] env[68798]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 557.956167] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Getting list of instances from cluster (obj){ [ 557.956167] env[68798]: value = "domain-c8" [ 557.956167] env[68798]: _type = "ClusterComputeResource" [ 557.956167] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 557.957429] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b5924e-79c7-457e-a15e-23d754245a0c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.967699] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Got total of 0 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 557.968247] env[68798]: WARNING nova.virt.vmwareapi.driver [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 557.968721] env[68798]: INFO nova.virt.node [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Generated node identity 855bb535-a51f-4f9d-8f32-8a3291b17319 [ 557.968965] env[68798]: INFO nova.virt.node [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Wrote node identity 855bb535-a51f-4f9d-8f32-8a3291b17319 to /opt/stack/data/n-cpu-1/compute_id [ 557.981835] env[68798]: WARNING nova.compute.manager [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Compute nodes ['855bb535-a51f-4f9d-8f32-8a3291b17319'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 558.019891] env[68798]: INFO nova.compute.manager [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 558.044223] env[68798]: WARNING nova.compute.manager [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
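Illustrative aside (not part of the captured log): the "Generated node identity ..." / "Wrote node identity ... to /opt/stack/data/n-cpu-1/compute_id" pair above reflects a read-or-generate pattern for a persistent compute node UUID, which is why the very first start also logs that no compute node record exists yet. A minimal sketch of that pattern follows, written as a standalone helper for illustration (this is not the actual nova.virt.node implementation); only the file path is taken from the log.

    import uuid
    from pathlib import Path

    # Path copied from the log line above.
    COMPUTE_ID_FILE = Path('/opt/stack/data/n-cpu-1/compute_id')


    def get_local_node_uuid(path: Path = COMPUTE_ID_FILE) -> str:
        """Return the node identity, generating and persisting it on first start."""
        if path.exists():
            # Later restarts reuse the identity written the first time around.
            return path.read_text().strip()
        node_uuid = str(uuid.uuid4())
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(node_uuid + '\n')
        return node_uuid


    if __name__ == '__main__':
        print(get_local_node_uuid())
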
[ 558.044481] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.044698] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.044851] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.045015] env[68798]: DEBUG nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 558.046105] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd11411d-8df8-40f7-beb6-da509eaa1b44 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.054705] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c228026-25a5-44fc-b6e6-f7f9b4b3204a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.068882] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4a3fb9-6dc7-44dc-99a2-cc3081304858 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.075710] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29031083-ee5d-4405-a97a-e5f7e13e01dc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.106619] env[68798]: DEBUG nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180754MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 558.106773] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.106984] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.119962] env[68798]: WARNING 
nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] No compute node record for cpu-1:855bb535-a51f-4f9d-8f32-8a3291b17319: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 855bb535-a51f-4f9d-8f32-8a3291b17319 could not be found. [ 558.133856] env[68798]: INFO nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 855bb535-a51f-4f9d-8f32-8a3291b17319 [ 558.192552] env[68798]: DEBUG nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 558.192740] env[68798]: DEBUG nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 558.303121] env[68798]: INFO nova.scheduler.client.report [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] [req-9beda9f1-8365-4592-a235-9b28fcee371b] Created resource provider record via placement API for resource provider with UUID 855bb535-a51f-4f9d-8f32-8a3291b17319 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 558.321146] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395b908b-708f-4767-a627-0ca781623db0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.329252] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b8c816-0198-442e-b833-77dbbe4693f0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.358913] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d380cf-654e-40d9-b1cb-00ec076d19a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.366599] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d85c845-4a7c-4454-8886-a264de92df7b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.380530] env[68798]: DEBUG nova.compute.provider_tree [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 558.419712] env[68798]: DEBUG nova.scheduler.client.report [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Updated inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 558.419965] env[68798]: DEBUG nova.compute.provider_tree [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Updating resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 generation from 0 to 1 during operation: update_inventory {{(pid=68798) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 558.420131] env[68798]: DEBUG nova.compute.provider_tree [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 558.470650] env[68798]: DEBUG nova.compute.provider_tree [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Updating resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 generation from 1 to 2 during operation: update_traits {{(pid=68798) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 558.489938] env[68798]: DEBUG nova.compute.resource_tracker [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 558.490149] env[68798]: DEBUG oslo_concurrency.lockutils [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.383s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 558.490316] env[68798]: DEBUG nova.service [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Creating RPC server for service compute {{(pid=68798) start /opt/stack/nova/nova/service.py:182}} [ 558.503577] env[68798]: DEBUG nova.service [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] Join ServiceGroup membership for this service compute {{(pid=68798) start /opt/stack/nova/nova/service.py:199}} [ 558.503826] env[68798]: DEBUG nova.servicegroup.drivers.db [None req-aed887a0-4f13-48a3-a9a5-44d2610822e8 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68798) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 567.781577] env[68798]: DEBUG dbcounter [-] [68798] Writing DB stats nova_cell1:SELECT=1 {{(pid=68798) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 567.782594] env[68798]: DEBUG dbcounter [-] [68798] Writing DB stats nova_cell0:SELECT=1 {{(pid=68798) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 574.505737] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running 
periodic task ComputeManager._sync_power_states {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.516644] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 574.516644] env[68798]: value = "domain-c8" [ 574.516644] env[68798]: _type = "ClusterComputeResource" [ 574.516644] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 574.517789] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e993d104-0829-4b30-949b-6dcca93e62ef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.527060] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 0 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 574.527304] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 574.527634] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 574.527634] env[68798]: value = "domain-c8" [ 574.527634] env[68798]: _type = "ClusterComputeResource" [ 574.527634] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 574.528530] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923bce58-7120-46fb-8b8f-1cf9ee9b33ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.537075] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 0 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 602.746953] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "8b5b6312-25fa-4eee-b951-88457b8e4fad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.746953] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "8b5b6312-25fa-4eee-b951-88457b8e4fad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.782703] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 602.920995] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.921315] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.923319] env[68798]: INFO nova.compute.claims [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.969624] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "df45083e-ece4-4768-bc08-022fe0185117" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.969624] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "df45083e-ece4-4768-bc08-022fe0185117" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.987789] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 603.124719] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.201429] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37373d92-10d6-44d9-b310-ac8abb7dee6b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.214523] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898c9236-5ae9-4a2b-9b5d-6b136a2ed677 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.251632] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf87fec-7f7d-4157-8873-024749a6018e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.261200] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdebfc41-221f-4cde-8a24-4be569f7e017 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.279151] env[68798]: DEBUG nova.compute.provider_tree [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.295778] env[68798]: DEBUG nova.scheduler.client.report [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 603.313564] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.392s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.314161] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 603.318399] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.194s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.320014] env[68798]: INFO nova.compute.claims [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.396517] env[68798]: DEBUG nova.compute.utils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.398727] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Not allocating networking since 'none' was specified. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 603.423192] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 603.483102] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2388ff-6fff-4390-961a-4a9070427d3a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.503733] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c794d5-746a-4019-a122-b55fb635255b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.545678] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 603.548530] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce3d5c8-6551-4dab-bdcb-26939518c81a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.565234] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09788ab8-e48b-40fa-83c5-9c5f049fe24d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.581317] env[68798]: DEBUG nova.compute.provider_tree [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.596023] env[68798]: DEBUG nova.scheduler.client.report [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 603.622470] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.622994] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 603.669071] env[68798]: DEBUG nova.compute.utils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 603.670808] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 603.671091] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 603.693891] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 603.808127] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 603.852285] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.852945] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.853730] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.853954] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.854120] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 603.854276] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.854496] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.854653] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.855089] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.855266] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.855436] env[68798]: DEBUG nova.virt.hardware [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.861318] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e70eda2-8782-43dd-acf6-f85262422b11 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.878298] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 603.878540] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.878695] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.878886] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.879068] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.879271] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.879888] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.879888] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.879888] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.880627] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.880994] env[68798]: DEBUG nova.virt.hardware [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.883037] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79fbcab-4f5c-41f6-8ada-a1d097e13f81 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.887839] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec69c330-081b-488b-924d-67ffa5433b27 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.917242] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a2d9e1-c1fb-4309-a1ef-c734aa00c6bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.937632] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ab28f4-39b7-425c-a0b9-0e7692b05f9c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.965166] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Instance VIF info [] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.974843] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.975298] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20c871f1-1fed-488f-afaa-aa00433df5e4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.992962] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Created folder: OpenStack in parent group-v4. [ 603.993161] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating folder: Project (b0561551fdcc4a0b88d55ee270ebf08b). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 603.993592] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-210bf732-c859-4cc0-b802-5c56e3148fb3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.007099] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Created folder: Project (b0561551fdcc4a0b88d55ee270ebf08b) in parent group-v834492. [ 604.007099] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating folder: Instances. Parent ref: group-v834493. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.007099] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1173ae42-b7a0-424c-bcef-993a9df6e806 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.020717] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Created folder: Instances in parent group-v834493. [ 604.020814] env[68798]: DEBUG oslo.service.loopingcall [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.024268] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 604.024268] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8fb3d775-a58e-47e5-8854-589bddd58d03 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.052240] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.052240] env[68798]: value = "task-4217509" [ 604.052240] env[68798]: _type = "Task" [ 604.052240] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.065042] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217509, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.112276] env[68798]: DEBUG nova.policy [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3590574e588843f0b7185e18fa92ccb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e57669da30594d4abf484262539ea414', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 604.419868] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "4995301f-e3c3-4032-adf0-7cffa3497d3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.422418] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "4995301f-e3c3-4032-adf0-7cffa3497d3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.446329] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 604.524831] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.525631] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.528430] env[68798]: INFO nova.compute.claims [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.568334] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217509, 'name': CreateVM_Task, 'duration_secs': 0.32023} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.568545] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 604.569571] env[68798]: DEBUG oslo_vmware.service [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fede89-1300-408c-b873-aa106f3b0a1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.582181] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.582181] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.583479] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 604.583479] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56adf82e-2967-4bbb-94e7-beb2aa7f2f1b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.592435] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for the task: (returnval){ [ 604.592435] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52803147-aeb4-f0f3-f08c-6dbb894da680" [ 604.592435] env[68798]: _type = "Task" [ 604.592435] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.610024] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52803147-aeb4-f0f3-f08c-6dbb894da680, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.724805] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dbf45b-59f9-4876-a9ba-642812af841f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.734356] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11050fe8-1af2-4f16-bd52-55e353c75b85 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.770134] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d615ac-6a54-4002-a5b3-437da46e335f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.776995] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "748df295-743b-41be-b873-523b688f2c78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.777258] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "748df295-743b-41be-b873-523b688f2c78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.787107] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0280de80-bf78-4556-950b-24457f0c9d0c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.795144] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 604.810776] env[68798]: DEBUG nova.compute.provider_tree [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.834396] env[68798]: DEBUG nova.scheduler.client.report [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 604.891183] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.365s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 604.891732] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 604.914030] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.917962] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.917962] env[68798]: INFO nova.compute.claims [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.946576] env[68798]: DEBUG nova.compute.utils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 604.948000] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 604.952214] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 604.970546] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 605.111600] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.111892] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.112462] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 605.113852] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.113852] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.115979] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9f2d0e4-967d-4891-bb8c-760660e51052 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.131637] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 605.137108] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.137108] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 605.139213] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d154f6bf-c381-4dde-bfcf-d84be8e2acf8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.154908] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b42ce823-32e2-4b2b-9053-5e43bd679043 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.170451] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for the task: (returnval){ [ 605.170451] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]524a59cd-fc38-99a3-398c-d197981b87c8" [ 605.170451] env[68798]: _type = "Task" [ 605.170451] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.179317] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.179571] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.179724] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.179903] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.180085] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.180253] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.180526] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.180705] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 605.180923] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.181014] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.183432] env[68798]: DEBUG nova.virt.hardware [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.184656] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7a97d9-487c-439d-99f6-83ebcbceed35 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.201095] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f0ca7f-0388-4bb7-a782-1972d22c73c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.206418] env[68798]: DEBUG nova.policy [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4591576f20d142a0a68342f8a1c9bfc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5b33dbd010340649a5c38226ec87f36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 605.208750] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 605.208843] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating directory with path [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.210207] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b59952e-0693-4ae2-a49e-e7952ee37431 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.242523] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eba4cf-3aa4-464c-a343-0e8619e4259e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.246651] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Created directory with path [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.246695] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Fetch image to [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 605.247029] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 605.248170] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ac22e6-6d8a-47ad-8d3d-c71ad1cddffa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.260462] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11247b7-dfc4-432f-9592-8f0dad67fdef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.269306] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f0d5ab-099d-4e77-84ba-372992c58028 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.298643] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e4bfeb-c30c-4a98-b062-b74399f4902e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.308035] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead3858f-a0e5-4515-bd78-97a68b854567 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.319741] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e32ae5-4a51-45b3-b75b-5160e80bfacf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.351639] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f273873a-f5ff-4c69-a2a2-17a18f18af47 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.365141] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.365570] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.367159] env[68798]: DEBUG nova.compute.provider_tree [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.379698] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-befa9772-ffce-4372-802b-749df028e322 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.393760] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 605.398059] env[68798]: DEBUG nova.scheduler.client.report [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.409990] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 605.419208] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.505s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.419716] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 605.485171] env[68798]: DEBUG nova.compute.utils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.486596] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 605.486660] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 605.504786] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 605.525737] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 605.605504] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.605504] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.607431] env[68798]: INFO nova.compute.claims [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.617694] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 605.618358] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 605.633890] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Successfully created port: b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.658777] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 605.699674] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 605.699674] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 605.699674] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 605.700043] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 605.700043] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 605.700043] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 605.700043] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 605.700043] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 605.700199] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 605.700199] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 605.700199] env[68798]: DEBUG nova.virt.hardware [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 605.700645] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5ad7ad-53fd-4840-98d7-188f89cf0dd5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.715386] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdca7b8c-8116-488b-b01a-592b3188872d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.825022] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b2b8f9-52d8-4a46-a3cb-5a582fcf954e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.836518] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f8541f-b330-48be-93ac-92a6e3557d57 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.872711] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b4b03f-be65-4cb7-a4dd-790aae7ebf56 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.881349] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40c957f-d35f-4073-b453-49797a3a5f4b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.896653] env[68798]: DEBUG nova.compute.provider_tree [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.915564] env[68798]: DEBUG nova.scheduler.client.report [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.940020] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.331s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.940020] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 605.948021] env[68798]: DEBUG nova.policy [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12f66b73d9d54c87954616c3d705268f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5446454a19514d22bc29c3b770523add', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 606.001465] env[68798]: DEBUG nova.compute.utils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.002871] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 606.003081] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 606.018825] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 606.163252] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 606.196549] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 606.196805] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 606.196972] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.199337] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 606.199486] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.199644] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 606.199861] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 606.200343] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 606.200343] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 
tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 606.200492] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 606.200527] env[68798]: DEBUG nova.virt.hardware [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.202252] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba3f5e1-b81e-4cfb-a05c-65f2c6fec67d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.217666] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a65ae6-5a44-48c8-83cc-273830111085 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.296394] env[68798]: DEBUG nova.policy [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46c14734675c4fc19fcad803253d5f0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f639e783a9624c8fab36eaaabc1e00d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 607.371353] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Successfully created port: 3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.893726] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Successfully created port: 67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 608.987960] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Successfully updated port: b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.007391] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 
tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.007520] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.007637] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.144204] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.253681] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Successfully created port: 538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.801497] env[68798]: DEBUG nova.compute.manager [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Received event network-vif-plugged-b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 609.801761] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] Acquiring lock "df45083e-ece4-4768-bc08-022fe0185117-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 609.802073] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] Lock "df45083e-ece4-4768-bc08-022fe0185117-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.802595] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] Lock "df45083e-ece4-4768-bc08-022fe0185117-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 609.802807] env[68798]: DEBUG nova.compute.manager [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] No waiting 
events found dispatching network-vif-plugged-b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 609.803026] env[68798]: WARNING nova.compute.manager [req-2d054ac7-864d-4694-94af-cc20c97f5d6c req-7ef02133-e15c-41e7-9715-487390802178 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Received unexpected event network-vif-plugged-b664da6d-a08e-4c49-90f2-b5f99288e69e for instance with vm_state building and task_state spawning. [ 610.402326] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Updating instance_info_cache with network_info: [{"id": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "address": "fa:16:3e:83:6a:e8", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb664da6d-a0", "ovs_interfaceid": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.417555] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.417856] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Instance network_info: |[{"id": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "address": "fa:16:3e:83:6a:e8", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", 
"segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb664da6d-a0", "ovs_interfaceid": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 610.418352] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:6a:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b664da6d-a08e-4c49-90f2-b5f99288e69e', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.427982] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating folder: Project (e57669da30594d4abf484262539ea414). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 610.429093] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfbd3cff-baee-4543-8a98-ec5eebbf165a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.441707] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created folder: Project (e57669da30594d4abf484262539ea414) in parent group-v834492. [ 610.442217] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating folder: Instances. Parent ref: group-v834496. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 610.442561] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26acb8a0-c697-47e6-9515-39b620c62123 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.455429] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created folder: Instances in parent group-v834496. [ 610.455429] env[68798]: DEBUG oslo.service.loopingcall [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.457246] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df45083e-ece4-4768-bc08-022fe0185117] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 610.457246] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4949726-6fda-4460-b2f6-047927a2b68c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.479867] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.479867] env[68798]: value = "task-4217512" [ 610.479867] env[68798]: _type = "Task" [ 610.479867] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.489708] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217512, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.999261] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217512, 'name': CreateVM_Task, 'duration_secs': 0.40225} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.999261] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df45083e-ece4-4768-bc08-022fe0185117] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 611.029264] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.029264] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.029264] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 611.029264] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a059f25-78e1-43b1-998c-c2357ef84454 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.037046] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 611.037046] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5207a21c-90df-d9e5-b9a2-fb8f7a30eedc" [ 611.037046] env[68798]: _type = "Task" [ 611.037046] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.052673] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5207a21c-90df-d9e5-b9a2-fb8f7a30eedc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.464217] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Successfully updated port: 3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.482363] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.482363] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.482363] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 611.549989] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 611.550109] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.550685] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 611.691213] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 612.093309] env[68798]: DEBUG nova.compute.manager [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Received event network-vif-plugged-3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 612.093309] env[68798]: DEBUG oslo_concurrency.lockutils [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] Acquiring lock "4995301f-e3c3-4032-adf0-7cffa3497d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.093438] env[68798]: DEBUG oslo_concurrency.lockutils [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] Lock "4995301f-e3c3-4032-adf0-7cffa3497d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.093504] env[68798]: DEBUG oslo_concurrency.lockutils [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] Lock "4995301f-e3c3-4032-adf0-7cffa3497d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.093690] env[68798]: DEBUG nova.compute.manager [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] No waiting events found dispatching network-vif-plugged-3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 612.093866] env[68798]: WARNING nova.compute.manager [req-bfdbc681-8663-4e6c-b504-e73c32e8b33b req-03ee2304-9717-4a4d-9519-032404e8ab14 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Received unexpected event network-vif-plugged-3ee8a559-eabf-4fd0-9d26-9e7a973a0929 for instance with vm_state building and task_state spawning. 
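The entries above for instance 4995301f show Neutron's network-vif-plugged notification arriving while the instance is still building: the compute manager looks for a registered waiter, finds none ("No waiting events found dispatching ..."), and logs the event as unexpected. The sketch below is a simplified illustration of that kind of waiter registry, assuming one threading.Event per (instance, event) pair; it is not Nova's actual InstanceEvents implementation.

# Simplified waiter registry (not Nova's InstanceEvents class) showing why an
# event that arrives before anyone registered for it is treated as unexpected.
import threading

class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            return False  # no waiter yet: the "unexpected event" case
        ev.set()
        return True

registry = EventRegistry()
# The notification lands before prepare() was ever called for this event:
assert registry.dispatch("some-instance-uuid", "network-vif-plugged") is False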
[ 612.548242] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Successfully updated port: 67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 612.569366] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.569506] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquired lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.569654] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.793740] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 612.901991] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Updating instance_info_cache with network_info: [{"id": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "address": "fa:16:3e:da:e6:89", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee8a559-ea", "ovs_interfaceid": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.920061] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 612.920061] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Instance network_info: |[{"id": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "address": "fa:16:3e:da:e6:89", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee8a559-ea", "ovs_interfaceid": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 612.920995] env[68798]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:e6:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ee8a559-eabf-4fd0-9d26-9e7a973a0929', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 612.931808] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating folder: Project (f5b33dbd010340649a5c38226ec87f36). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 612.933205] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-913ba2f3-fb13-470a-9aa1-0486930ec26c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.946998] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created folder: Project (f5b33dbd010340649a5c38226ec87f36) in parent group-v834492. [ 612.947297] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating folder: Instances. Parent ref: group-v834499. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 612.947582] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-091a8f5d-e3c8-4233-9f74-76e8ec66e217 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.960019] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created folder: Instances in parent group-v834499. [ 612.960019] env[68798]: DEBUG oslo.service.loopingcall [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 612.960159] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 612.960325] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65954314-5f90-478f-af3c-ec85a52d3758 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.986879] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 612.986879] env[68798]: value = "task-4217515" [ 612.986879] env[68798]: _type = "Task" [ 612.986879] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.997296] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217515, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.053222] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Successfully updated port: 538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 613.075817] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.075974] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquired lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.076147] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.248483] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.499887] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217515, 'name': CreateVM_Task, 'duration_secs': 0.376059} completed successfully. 
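The CreateVM_Task entries around here follow one pattern: invoke a vSphere task through the API session, then poll it until vCenter reports completion (the "progress is 0%" and "completed successfully" lines). Below is a sketch of that invoke-then-wait pattern with oslo.vmware, assuming session is an existing oslo_vmware.api.VMwareAPISession and that folder_ref, config_spec and res_pool_ref are pre-built placeholders rather than values taken from this log.

# Invoke-then-wait sketch for a vSphere task via oslo.vmware. `session`,
# `folder_ref`, `config_spec` and `res_pool_ref` are assumed placeholders.
def create_vm(session, folder_ref, config_spec, res_pool_ref):
    task = session.invoke_api(session.vim, "CreateVM_Task",
                              folder_ref, config=config_spec, pool=res_pool_ref)
    # wait_for_task() polls the task's server-side state (the "progress is 0%"
    # lines) and raises if the task errors out or is cancelled.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed object reference of the new VM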
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.500110] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 613.500749] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.500966] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.501234] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 613.501492] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cffa3dd0-07b3-49fd-aa4d-5575d8d7502c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.509484] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 613.509484] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5295236b-3ebb-291e-8761-612b1072908a" [ 613.509484] env[68798]: _type = "Task" [ 613.509484] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.518343] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5295236b-3ebb-291e-8761-612b1072908a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.590824] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Updating instance_info_cache with network_info: [{"id": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "address": "fa:16:3e:83:f3:12", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a290b6-bf", "ovs_interfaceid": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.612629] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Releasing lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.613047] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Instance network_info: |[{"id": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "address": "fa:16:3e:83:f3:12", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a290b6-bf", "ovs_interfaceid": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
613.615890] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:f3:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67a290b6-bffb-424a-bc8f-f802e9de1aae', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.625540] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Creating folder: Project (5446454a19514d22bc29c3b770523add). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 613.626384] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39d75292-fe6d-413a-a5a1-adc7bc216560 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.641434] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Created folder: Project (5446454a19514d22bc29c3b770523add) in parent group-v834492. [ 613.641633] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Creating folder: Instances. Parent ref: group-v834502. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 613.643554] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9bfb80d2-f10c-488e-ac90-2a5c0063801e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.646783] env[68798]: DEBUG nova.compute.manager [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Received event network-changed-b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 613.647291] env[68798]: DEBUG nova.compute.manager [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Refreshing instance network info cache due to event network-changed-b664da6d-a08e-4c49-90f2-b5f99288e69e. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 613.647291] env[68798]: DEBUG oslo_concurrency.lockutils [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] Acquiring lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.647291] env[68798]: DEBUG oslo_concurrency.lockutils [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] Acquired lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.647572] env[68798]: DEBUG nova.network.neutron [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Refreshing network info cache for port b664da6d-a08e-4c49-90f2-b5f99288e69e {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 613.661848] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Created folder: Instances in parent group-v834502. [ 613.665027] env[68798]: DEBUG oslo.service.loopingcall [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 613.665027] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 748df295-743b-41be-b873-523b688f2c78] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 613.665027] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf67d021-3357-4ec5-806d-7280904c741c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.689402] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.689402] env[68798]: value = "task-4217518" [ 613.689402] env[68798]: _type = "Task" [ 613.689402] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.699633] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217518, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.024338] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.024626] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.024845] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.060765] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.060765] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.060765] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 614.060765] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 614.083134] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 614.083428] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: df45083e-ece4-4768-bc08-022fe0185117] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 614.083675] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 614.083967] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 748df295-743b-41be-b873-523b688f2c78] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 614.084229] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 614.084648] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 614.085391] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.085790] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.086132] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.086535] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.086844] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.087517] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.088110] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
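The run of "Running periodic task ComputeManager._*" lines above comes from oslo.service's periodic-task machinery: decorated methods on the manager are collected and driven by run_periodic_tasks() on a timer. A minimal sketch follows, with placeholder task names rather than the real ComputeManager methods.

# Minimal periodic-task sketch with oslo.service; task names are placeholders.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF

class Manager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=60)
    def _heal_info_cache(self, context):
        pass  # placeholder; the real task refreshes instance network caches

    @periodic_task.periodic_task
    def _check_build_time(self, context):
        pass  # runs at the default spacing

mgr = Manager()
mgr.run_periodic_tasks(context=None)  # normally driven from a looping timer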
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 614.088808] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 614.105101] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.105343] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.105513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.105733] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 614.107152] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccdcc18-4809-4d8c-9459-eb13572f8bb9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.120735] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90b3ada-c7bd-4d4e-90ec-8a8fc42fc2f0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.137553] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Updating instance_info_cache with network_info: [{"id": "538b953d-cce5-49e2-b2d6-0eae06f807a4", "address": "fa:16:3e:64:c6:d2", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap538b953d-cc", "ovs_interfaceid": 
"538b953d-cce5-49e2-b2d6-0eae06f807a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.139209] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3428667f-b06e-4d75-9a4f-df50ebf4da37 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.149570] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1982c-785f-43db-b9f9-1691a0c0741d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.155983] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Releasing lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.156342] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance network_info: |[{"id": "538b953d-cce5-49e2-b2d6-0eae06f807a4", "address": "fa:16:3e:64:c6:d2", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap538b953d-cc", "ovs_interfaceid": "538b953d-cce5-49e2-b2d6-0eae06f807a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 614.184360] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:c6:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '538b953d-cce5-49e2-b2d6-0eae06f807a4', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.194074] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 
tempest-MigrationsAdminTest-1554767835-project-member] Creating folder: Project (f639e783a9624c8fab36eaaabc1e00d4). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.194074] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180753MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 614.195927] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 614.195927] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.196373] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d8f7b45-8ef1-428b-a460-51fb980d3fed {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.213487] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217518, 'name': CreateVM_Task, 'duration_secs': 0.449965} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.221391] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 748df295-743b-41be-b873-523b688f2c78] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 614.221647] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Created folder: Project (f639e783a9624c8fab36eaaabc1e00d4) in parent group-v834492. [ 614.221853] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Creating folder: Instances. Parent ref: group-v834505. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.222667] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.222817] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.223303] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 614.223419] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37432ff8-2153-4af6-a2d5-3ac6aaad96d9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.225896] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d230ded1-9986-49a1-9157-fa8172571615 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.235738] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Waiting for the task: (returnval){ [ 614.235738] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5273c4e7-b384-2994-5316-5352d9e9a2ee" [ 614.235738] env[68798]: _type = "Task" [ 614.235738] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.239583] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Created folder: Instances in parent group-v834505. [ 614.239738] env[68798]: DEBUG oslo.service.loopingcall [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.240361] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 614.240591] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe28eed-f191-4164-801a-e97fee579c32 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.261775] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5273c4e7-b384-2994-5316-5352d9e9a2ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.273683] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.273683] env[68798]: value = "task-4217521" [ 614.273683] env[68798]: _type = "Task" [ 614.273683] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.283712] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217521, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.314225] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8b5b6312-25fa-4eee-b951-88457b8e4fad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.314480] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance df45083e-ece4-4768-bc08-022fe0185117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.314664] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 4995301f-e3c3-4032-adf0-7cffa3497d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.314821] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 748df295-743b-41be-b873-523b688f2c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.314981] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 614.317215] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 614.317215] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 614.436194] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ca5e0f-c874-45d7-9ac7-d15a233d5d56 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.447582] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d2ba9e-fa72-4718-86bb-f9b5ab189216 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.488177] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2bce1e-5178-442b-a54c-79ab22aa5771 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.500888] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d868b8b-3e3b-4d58-981c-bea85b148c5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.518311] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.529668] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.558443] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 614.558786] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.364s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.746863] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.747355] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 614.747618] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.787311] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217521, 'name': CreateVM_Task, 'duration_secs': 0.355491} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.787605] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 614.788453] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 614.788610] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.789043] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 614.789198] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e95814-fe00-4e9c-9401-789ce011684f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.796199] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for the task: (returnval){ [ 614.796199] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a89ed0-ca6b-d915-923d-bb54f9ab9635" [ 614.796199] env[68798]: _type = "Task" [ 
614.796199] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.805755] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a89ed0-ca6b-d915-923d-bb54f9ab9635, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.958716] env[68798]: DEBUG nova.network.neutron [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Updated VIF entry in instance network info cache for port b664da6d-a08e-4c49-90f2-b5f99288e69e. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 614.959340] env[68798]: DEBUG nova.network.neutron [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] [instance: df45083e-ece4-4768-bc08-022fe0185117] Updating instance_info_cache with network_info: [{"id": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "address": "fa:16:3e:83:6a:e8", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.162", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb664da6d-a0", "ovs_interfaceid": "b664da6d-a08e-4c49-90f2-b5f99288e69e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.976282] env[68798]: DEBUG oslo_concurrency.lockutils [req-84ab5b8f-a80c-4408-aa73-7d035650ffac req-e94e19da-40a3-4dc7-a3ef-05becf446f76 service nova] Releasing lock "refresh_cache-df45083e-ece4-4768-bc08-022fe0185117" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.310529] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.310928] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.311246] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.480540] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.480540] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.498526] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 615.572499] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.572730] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.574948] env[68798]: INFO nova.compute.claims [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.814176] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19c4281-81ff-4945-97c1-f2df8645f29b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.824905] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2aa8560-6b34-4b05-8e28-e1af99f91109 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.871271] env[68798]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8534985f-bb32-495f-a7e9-a1549c3aa423 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.877959] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72f6451-035d-48ab-877b-605637727862 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.893756] env[68798]: DEBUG nova.compute.provider_tree [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.906959] env[68798]: DEBUG nova.scheduler.client.report [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 615.923598] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.924346] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 615.981995] env[68798]: DEBUG nova.compute.utils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.989023] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 615.989023] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 616.022466] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 616.158371] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 616.198031] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.198031] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.198031] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.198344] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.198344] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.198344] env[68798]: DEBUG nova.virt.hardware 
[None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.198344] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.198344] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.198802] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.198802] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.198802] env[68798]: DEBUG nova.virt.hardware [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.200023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bbe9e1-1476-477c-86ee-f27a05a9ba25 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.210575] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3647d4-dba2-4159-bc13-01e710500bbb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.328532] env[68798]: DEBUG nova.policy [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1731da1cb64d438a520ed45bea191e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cc3c3581f8d34668807f65cefe23fe5f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 616.414365] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 
tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "90d9df19-2d93-4543-a650-4a624f505d5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.414593] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.428949] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 616.517557] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.517819] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 616.519341] env[68798]: INFO nova.compute.claims [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.734935] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080ec990-c086-4e88-9b5a-9174d507d4df {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.744146] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1610c160-d6fe-4402-bac2-347328e75557 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.787020] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b972b5d9-4557-4afc-afde-2d8bb580660a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.795779] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac3d113-112c-4ae5-99f0-1bb8cb1e2d0e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.811957] env[68798]: DEBUG nova.compute.provider_tree [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 
tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.822198] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Received event network-changed-3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 616.822446] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Refreshing instance network info cache due to event network-changed-3ee8a559-eabf-4fd0-9d26-9e7a973a0929. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 616.822649] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquiring lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 616.822764] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquired lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.822929] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Refreshing network info cache for port 3ee8a559-eabf-4fd0-9d26-9e7a973a0929 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.828233] env[68798]: DEBUG nova.scheduler.client.report [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.856245] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.336s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.856245] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 616.894239] env[68798]: DEBUG nova.compute.utils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 616.897995] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Not allocating networking since 'none' was specified. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 616.907892] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 617.020375] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 617.058162] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.059021] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.059021] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.059319] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 617.061028] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.061028] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.061028] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.061028] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.061028] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.061394] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.061394] env[68798]: DEBUG nova.virt.hardware [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.062711] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742c8640-7799-4837-ad5d-19e6b5e1adee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.076585] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b883a1-10a6-4087-bce3-2f53c79ff501 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.094770] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance VIF info [] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.100863] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 
tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Creating folder: Project (cc867adbb847422bbc9a34def8eca1ff). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.102377] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b8efbc16-d86e-40b1-bc4b-9e6e20b50a8c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.117113] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Created folder: Project (cc867adbb847422bbc9a34def8eca1ff) in parent group-v834492. [ 617.117382] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Creating folder: Instances. Parent ref: group-v834508. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.117655] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08d2bd76-9a49-42b5-963e-8c82e3ea483c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.131694] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Created folder: Instances in parent group-v834508. [ 617.132263] env[68798]: DEBUG oslo.service.loopingcall [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.132568] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.132827] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c898704-5c6b-4aea-b71f-0299e55ccb43 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.156929] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.156929] env[68798]: value = "task-4217524" [ 617.156929] env[68798]: _type = "Task" [ 617.156929] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.167819] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217524, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.671056] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217524, 'name': CreateVM_Task, 'duration_secs': 0.354719} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.673907] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.674649] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.675095] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.675768] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 617.676412] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9af85416-edff-4eb1-9f91-88e7d490c87f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.683965] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for the task: (returnval){ [ 617.683965] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52eba988-3026-f610-bca0-422122f600ad" [ 617.683965] env[68798]: _type = "Task" [ 617.683965] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.698463] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52eba988-3026-f610-bca0-422122f600ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.201923] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.202172] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.202625] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.239988] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Updated VIF entry in instance network info cache for port 3ee8a559-eabf-4fd0-9d26-9e7a973a0929. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 618.240353] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Updating instance_info_cache with network_info: [{"id": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "address": "fa:16:3e:da:e6:89", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee8a559-ea", "ovs_interfaceid": "3ee8a559-eabf-4fd0-9d26-9e7a973a0929", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.255020] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Releasing lock "refresh_cache-4995301f-e3c3-4032-adf0-7cffa3497d3e" {{(pid=68798) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.255303] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Received event network-vif-plugged-67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 618.255568] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquiring lock "748df295-743b-41be-b873-523b688f2c78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.255794] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Lock "748df295-743b-41be-b873-523b688f2c78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.256057] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Lock "748df295-743b-41be-b873-523b688f2c78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.256322] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] No waiting events found dispatching network-vif-plugged-67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.256397] env[68798]: WARNING nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Received unexpected event network-vif-plugged-67a290b6-bffb-424a-bc8f-f802e9de1aae for instance with vm_state building and task_state spawning. 
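The WARNING entries above come from the external-event path: Neutron reports network-vif-plugged / network-changed for a port, the compute manager takes the per-instance "<uuid>-events" lock, tries to pop a registered waiter for that event, and logs the event as unexpected when the instance is still building and nothing is waiting yet. A minimal, hypothetical sketch of that pattern follows (illustrative only, not the Nova source; class and function names here are invented):

import threading

class InstanceEvents:
    """Simplified stand-in for the per-instance event registry seen in the log."""

    def __init__(self):
        self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
        self._events = {}                  # {instance_uuid: {event_name: threading.Event}}

    def prepare_for_event(self, instance_uuid, event_name):
        # A spawn path that expects e.g. "network-vif-plugged-<port-id>" registers first.
        with self._lock:
            waiter = threading.Event()
            self._events.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when the external event arrives; returns None if nobody registered.
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the "Received unexpected event ... vm_state building" warnings above.
        print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()                       # unblocks the thread waiting for the VIF plug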
[ 618.257145] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Received event network-vif-plugged-538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 618.257645] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquiring lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.257920] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.258107] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 618.258289] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] No waiting events found dispatching network-vif-plugged-538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.258498] env[68798]: WARNING nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Received unexpected event network-vif-plugged-538b953d-cce5-49e2-b2d6-0eae06f807a4 for instance with vm_state building and task_state spawning. [ 618.258733] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Received event network-changed-67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 618.259243] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Refreshing instance network info cache due to event network-changed-67a290b6-bffb-424a-bc8f-f802e9de1aae. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 618.259443] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquiring lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.259695] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquired lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.259806] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Refreshing network info cache for port 67a290b6-bffb-424a-bc8f-f802e9de1aae {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 618.263274] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Successfully created port: 3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.426845] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Updated VIF entry in instance network info cache for port 67a290b6-bffb-424a-bc8f-f802e9de1aae. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 620.426845] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 748df295-743b-41be-b873-523b688f2c78] Updating instance_info_cache with network_info: [{"id": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "address": "fa:16:3e:83:f3:12", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67a290b6-bf", "ovs_interfaceid": "67a290b6-bffb-424a-bc8f-f802e9de1aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.441707] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Releasing lock "refresh_cache-748df295-743b-41be-b873-523b688f2c78" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.442023] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Received event network-changed-538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 620.442212] env[68798]: DEBUG nova.compute.manager [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Refreshing instance network info cache due to event network-changed-538b953d-cce5-49e2-b2d6-0eae06f807a4. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 620.442418] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquiring lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.442558] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Acquired lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.442712] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Refreshing network info cache for port 538b953d-cce5-49e2-b2d6-0eae06f807a4 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.137427] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Updated VIF entry in instance network info cache for port 538b953d-cce5-49e2-b2d6-0eae06f807a4. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 622.137427] env[68798]: DEBUG nova.network.neutron [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Updating instance_info_cache with network_info: [{"id": "538b953d-cce5-49e2-b2d6-0eae06f807a4", "address": "fa:16:3e:64:c6:d2", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.241", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap538b953d-cc", "ovs_interfaceid": "538b953d-cce5-49e2-b2d6-0eae06f807a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.159369] env[68798]: DEBUG oslo_concurrency.lockutils [req-39100719-254e-477a-a414-944216d15897 req-a8e8f7e2-2324-40fc-a282-50c615e5bb64 service nova] Releasing lock "refresh_cache-82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.520555] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] 
Successfully updated port: 3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.536619] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.537363] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquired lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.537602] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.765764] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.177141] env[68798]: DEBUG nova.compute.manager [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Received event network-vif-plugged-3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 624.177141] env[68798]: DEBUG oslo_concurrency.lockutils [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] Acquiring lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.177141] env[68798]: DEBUG oslo_concurrency.lockutils [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.177141] env[68798]: DEBUG oslo_concurrency.lockutils [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.177347] env[68798]: DEBUG nova.compute.manager [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] No waiting events found dispatching network-vif-plugged-3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 624.177347] env[68798]: WARNING nova.compute.manager [req-036e0b24-dc77-43bb-98e5-8d91d3ef259f req-513d7626-49a4-422d-98d8-43ebfe59f94b service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Received unexpected event network-vif-plugged-3eb0c70c-c5a8-468c-8eab-eb5a5367f27f for instance with vm_state building and task_state spawning. [ 625.371185] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Updating instance_info_cache with network_info: [{"id": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "address": "fa:16:3e:a8:82:65", "network": {"id": "5740656c-19ac-4048-a36f-b2687c3f3e52", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2090220514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc3c3581f8d34668807f65cefe23fe5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eb0c70c-c5", "ovs_interfaceid": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.386266] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Releasing lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.387795] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance network_info: |[{"id": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "address": "fa:16:3e:a8:82:65", "network": {"id": "5740656c-19ac-4048-a36f-b2687c3f3e52", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2090220514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc3c3581f8d34668807f65cefe23fe5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap3eb0c70c-c5", "ovs_interfaceid": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 625.388332] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:82:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3eb0c70c-c5a8-468c-8eab-eb5a5367f27f', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.399536] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Creating folder: Project (cc3c3581f8d34668807f65cefe23fe5f). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 625.401629] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7b19b39-6c16-490c-813d-12ac669a6ca8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.411679] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Created folder: Project (cc3c3581f8d34668807f65cefe23fe5f) in parent group-v834492. [ 625.411901] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Creating folder: Instances. Parent ref: group-v834511. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 625.412177] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23677ba3-224d-4ca2-bbbd-1e22a0204c05 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.423087] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Created folder: Instances in parent group-v834511. [ 625.423087] env[68798]: DEBUG oslo.service.loopingcall [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.423087] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 625.423567] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3cb1f853-8ff5-4b0c-b3de-59d46b612af1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.451603] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.451603] env[68798]: value = "task-4217527" [ 625.451603] env[68798]: _type = "Task" [ 625.451603] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.463128] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217527, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.966232] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217527, 'name': CreateVM_Task, 'duration_secs': 0.335156} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.966232] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 625.969130] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.969130] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.969130] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 625.969130] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3edf6d37-1902-496f-bced-91bdd13d85b0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.976067] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for the task: (returnval){ [ 625.976067] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52d9c852-1507-ffa0-5d72-ad5a74dc0cbe" [ 625.976067] env[68798]: _type = "Task" [ 625.976067] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.993786] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52d9c852-1507-ffa0-5d72-ad5a74dc0cbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.445979] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "620ef3f6-0444-474d-8179-3dc0143f2e99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.446652] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.482101] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 626.498167] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.498523] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.499367] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.580156] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.580523] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.582930] env[68798]: INFO nova.compute.claims [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.856760] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a6fb4b-13b2-4e84-9b8f-c4ef19b74f1b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.867598] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b4b82c-7533-4d20-bd35-f7f194e01f6c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.907958] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd19e6d-5f34-4d35-8b6b-49d591f0a262 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.917639] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e72fff-2f76-4ed9-a613-4ab0d8771834 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.936508] env[68798]: DEBUG nova.compute.provider_tree [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.953218] env[68798]: DEBUG nova.scheduler.client.report [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.980083] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.399s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.980740] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 627.033353] env[68798]: DEBUG nova.compute.utils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.034761] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 627.034968] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.048735] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 627.160595] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 627.196382] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 627.196382] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 627.196382] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 627.196646] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 627.196685] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 627.196971] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 627.197963] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 627.200446] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 627.200446] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 627.200446] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 627.200446] env[68798]: DEBUG nova.virt.hardware [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 627.200788] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a720e6-4412-430e-a62e-10731c02e286 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.217762] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2e7d3b-7836-4411-a022-08a9ac5e74ab {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.473985] env[68798]: DEBUG nova.policy [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f78066c07a8454cb2e3696e45ab5ff6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dadfddb85ecc42e78957ff79ec19aa44', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 627.568546] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "e060aaea-7508-46ed-8786-b5753fde75e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.569033] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] 
Lock "e060aaea-7508-46ed-8786-b5753fde75e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.585615] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 627.656164] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.656423] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.657948] env[68798]: INFO nova.compute.claims [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.922876] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8b987a-90a0-40ec-8fa1-704fe7fa4d30 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.932836] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4347777-d4f4-4b4e-a042-a3c47a051644 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.970851] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f551d8-848a-4418-8f8b-29f769b4a6fb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.983010] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e2f4aa-a20e-425c-8929-984e1de4ef76 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.001479] env[68798]: DEBUG nova.compute.provider_tree [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.013081] env[68798]: DEBUG nova.scheduler.client.report [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Inventory 
has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.032390] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.032813] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 628.084400] env[68798]: DEBUG nova.compute.utils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 628.085930] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 628.086085] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 628.101614] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 628.223698] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 628.254827] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.255094] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.255267] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.255682] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.255823] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.256058] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.256236] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.256529] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 628.256597] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.256750] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.256922] env[68798]: DEBUG nova.virt.hardware [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.258012] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f8894c-8b06-4b5f-b075-cd0b7bda9cdd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.268843] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2284ae61-bbc2-4294-8834-c282c95ef1ba {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.788024] env[68798]: DEBUG nova.policy [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6d4303727be4f5d87c1477ce1da5ea5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '49b472c72e5b4f9897b505fa5c68e9f2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 629.149882] env[68798]: DEBUG nova.compute.manager [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Received event network-changed-3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 629.149882] env[68798]: DEBUG nova.compute.manager [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Refreshing instance network info cache due to event network-changed-3eb0c70c-c5a8-468c-8eab-eb5a5367f27f. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 629.149981] env[68798]: DEBUG oslo_concurrency.lockutils [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] Acquiring lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.150110] env[68798]: DEBUG oslo_concurrency.lockutils [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] Acquired lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.150269] env[68798]: DEBUG nova.network.neutron [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Refreshing network info cache for port 3eb0c70c-c5a8-468c-8eab-eb5a5367f27f {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 629.266546] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Successfully created port: f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 630.916548] env[68798]: DEBUG nova.network.neutron [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Updated VIF entry in instance network info cache for port 3eb0c70c-c5a8-468c-8eab-eb5a5367f27f. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 630.916548] env[68798]: DEBUG nova.network.neutron [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Updating instance_info_cache with network_info: [{"id": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "address": "fa:16:3e:a8:82:65", "network": {"id": "5740656c-19ac-4048-a36f-b2687c3f3e52", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-2090220514-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cc3c3581f8d34668807f65cefe23fe5f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eb0c70c-c5", "ovs_interfaceid": "3eb0c70c-c5a8-468c-8eab-eb5a5367f27f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.936667] env[68798]: DEBUG oslo_concurrency.lockutils [req-4d12788e-5e2d-4264-baef-13dedcd6a141 req-20e54eff-322d-4bcb-929d-90028277303e service nova] Releasing lock "refresh_cache-c1606420-0fd3-4bd3-a8fa-91772c11f9bf" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.437060] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Successfully created port: 0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.774290] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.774290] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.794738] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 631.873964] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.874881] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.879386] env[68798]: INFO nova.compute.claims [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.140525] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef5b1c7-921f-44ee-bd4c-c53c5ac295f3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.149248] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0babbd85-9464-438e-b16e-0012443a9ad2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.186384] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a26d9c7-7a8c-40b7-891e-fb337afbe841 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.193829] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8fd909-a162-4f34-97c9-915001093521 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.212158] env[68798]: DEBUG nova.compute.provider_tree [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.229124] env[68798]: DEBUG nova.scheduler.client.report [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 632.247275] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.373s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.247797] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 632.300862] env[68798]: DEBUG nova.compute.utils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 632.304293] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 632.304293] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 632.319546] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 632.400323] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 632.437018] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 632.437018] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 632.437018] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.437245] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 632.437245] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.437245] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 632.437245] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 632.437245] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 632.437381] env[68798]: DEBUG nova.virt.hardware [None 
req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 632.437381] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 632.437381] env[68798]: DEBUG nova.virt.hardware [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.437381] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d51088c-291a-4d82-b6ff-1d045af7e94c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.448766] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62915b03-7302-4f24-8769-b258f6326ecf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.897279] env[68798]: DEBUG nova.policy [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d65e792fb43450fb5b0aacd66679fdc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd9caea5e2a3d4e5286e3d93eee026dce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 633.201924] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Successfully updated port: f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.226040] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.226194] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquired lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.226342] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a 
tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.565333] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.778483] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Updating instance_info_cache with network_info: [{"id": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "address": "fa:16:3e:9c:81:fa", "network": {"id": "23728427-d693-4717-b743-93f76965b932", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-573643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dadfddb85ecc42e78957ff79ec19aa44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ecc2c6-a7", "ovs_interfaceid": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.795133] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Releasing lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.795133] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance network_info: |[{"id": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "address": "fa:16:3e:9c:81:fa", "network": {"id": "23728427-d693-4717-b743-93f76965b932", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-573643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "dadfddb85ecc42e78957ff79ec19aa44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ecc2c6-a7", "ovs_interfaceid": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 634.795304] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:81:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad72c645-a67d-4efd-b563-28e44077e68d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2ecc2c6-a72c-410f-bfc6-21b12846808d', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 634.804152] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Creating folder: Project (dadfddb85ecc42e78957ff79ec19aa44). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.804744] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a8adb1d-4db7-4b5a-ae04-d3b120063f00 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.816389] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Created folder: Project (dadfddb85ecc42e78957ff79ec19aa44) in parent group-v834492. [ 634.816611] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Creating folder: Instances. Parent ref: group-v834514. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 634.816864] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f839701f-a2a8-4449-9219-485f11a85fde {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.827867] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Created folder: Instances in parent group-v834514. [ 634.828205] env[68798]: DEBUG oslo.service.loopingcall [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.828453] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 634.828668] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57cf7e52-2641-42e1-b1a7-a83e9c619123 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.855986] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 634.855986] env[68798]: value = "task-4217530" [ 634.855986] env[68798]: _type = "Task" [ 634.855986] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.865812] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217530, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.369680] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217530, 'name': CreateVM_Task, 'duration_secs': 0.300701} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.370903] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 635.370993] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.371524] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.371851] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 635.372165] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffa7a5c8-9674-44ab-a1de-d147660b745b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.378394] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for the task: (returnval){ [ 635.378394] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]525d35f9-ca6a-feea-5229-737075061fd8" [ 635.378394] env[68798]: _type = "Task" [ 635.378394] 
env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.390013] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]525d35f9-ca6a-feea-5229-737075061fd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.586255] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Successfully created port: 4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.781187] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Successfully updated port: 0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 635.799183] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.799183] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquired lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.799183] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 635.890799] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 635.891170] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 635.891446] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 
tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.975099] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.242620] env[68798]: DEBUG nova.compute.manager [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Received event network-vif-plugged-f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 636.242620] env[68798]: DEBUG oslo_concurrency.lockutils [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] Acquiring lock "620ef3f6-0444-474d-8179-3dc0143f2e99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.242620] env[68798]: DEBUG oslo_concurrency.lockutils [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.242620] env[68798]: DEBUG oslo_concurrency.lockutils [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.243038] env[68798]: DEBUG nova.compute.manager [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] No waiting events found dispatching network-vif-plugged-f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 636.243038] env[68798]: WARNING nova.compute.manager [req-a97f3a0e-7d98-4d9b-bb6e-ba801112a5fa req-33a86355-11d4-4a18-bc0a-b2f43ca9d05f service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Received unexpected event network-vif-plugged-f2ecc2c6-a72c-410f-bfc6-21b12846808d for instance with vm_state building and task_state spawning. 
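
The recurring "Acquiring lock … by …", "acquired … :: waited", and "released … :: held" DEBUG lines in this trace are emitted by oslo.concurrency's lock helpers, while the plain "Acquiring lock / Acquired lock / Releasing lock" lines come from its context-manager form. The following is a minimal sketch of those two forms, assuming only that oslo.concurrency is installed; the function names and lock names here are illustrative, not Nova's own code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    """Runs with the named in-process lock held; the decorator's wrapper
    logs the 'acquired ... waited' / 'released ... held' DEBUG lines."""
    return instance_uuid

def refresh_network_cache(instance_uuid):
    # Explicit context-manager form; this emits the bare
    # 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here

Both forms serialize callers on the same lock name within the process, which is why the trace shows per-instance names such as "refresh_cache-<uuid>" and "<uuid>-events" rather than one global lock.
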
[ 636.850793] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Updating instance_info_cache with network_info: [{"id": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "address": "fa:16:3e:55:aa:55", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eefdba3-83", "ovs_interfaceid": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.872248] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Releasing lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.872248] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance network_info: |[{"id": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "address": "fa:16:3e:55:aa:55", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eefdba3-83", "ovs_interfaceid": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.872780] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 
tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:aa:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0eefdba3-83dd-47f8-9031-8d04a26b999e', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.881160] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Creating folder: Project (49b472c72e5b4f9897b505fa5c68e9f2). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.882014] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8c86472-5d53-4dd6-9ba4-81361e4e2531 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.896316] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Created folder: Project (49b472c72e5b4f9897b505fa5c68e9f2) in parent group-v834492. [ 636.896772] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Creating folder: Instances. Parent ref: group-v834517. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 636.897219] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dfd8e324-acaf-44a3-b5be-69be9257acf4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.908672] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Created folder: Instances in parent group-v834517. [ 636.909846] env[68798]: DEBUG oslo.service.loopingcall [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 636.909846] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 636.909846] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-989ce3a8-e9b7-431d-ae24-d728a1b96122 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.938518] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.938518] env[68798]: value = "task-4217533" [ 636.938518] env[68798]: _type = "Task" [ 636.938518] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.948035] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217533, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.448776] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217533, 'name': CreateVM_Task, 'duration_secs': 0.382759} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.449017] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 637.449781] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.449992] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.450366] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 637.451213] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b8276dc-712d-4621-909b-bad1ad1a0db2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.457020] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for the task: (returnval){ [ 637.457020] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52489578-ad00-93f3-e4ef-906f78db3f68" [ 637.457020] env[68798]: _type = "Task" [ 637.457020] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.468379] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52489578-ad00-93f3-e4ef-906f78db3f68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.551724] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Successfully updated port: 4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 637.563306] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.563448] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquired lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.563593] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.666371] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.973434] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.973824] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.973977] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.160740] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Updating instance_info_cache with network_info: [{"id": "4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "address": "fa:16:3e:f0:8c:d4", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d6dc99a-fb", "ovs_interfaceid": "4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.212450] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Releasing lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.213921] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance network_info: |[{"id": 
"4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "address": "fa:16:3e:f0:8c:d4", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d6dc99a-fb", "ovs_interfaceid": "4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.216124] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:8c:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d6dc99a-fba7-4735-a0b3-0e40cf630d94', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.226636] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Creating folder: Project (d9caea5e2a3d4e5286e3d93eee026dce). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 638.231180] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9c71a3b-7bed-49b2-852e-d438fb0128c0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.247099] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Created folder: Project (d9caea5e2a3d4e5286e3d93eee026dce) in parent group-v834492. [ 638.247099] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Creating folder: Instances. Parent ref: group-v834520. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 638.247099] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cf1dc65-297d-4250-9954-64f4e6609442 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.261129] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Created folder: Instances in parent group-v834520. [ 638.261434] env[68798]: DEBUG oslo.service.loopingcall [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.262060] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 638.262327] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0e182823-c404-4613-8e25-103e789956e0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.283572] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.283572] env[68798]: value = "task-4217536" [ 638.283572] env[68798]: _type = "Task" [ 638.283572] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.292278] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217536, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.768021] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.768273] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.793391] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217536, 'name': CreateVM_Task, 'duration_secs': 0.402352} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.793505] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 638.794108] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 638.794448] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.794601] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 638.794860] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3492ff0c-00f2-4776-8e82-acf37734df59 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.802625] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for the task: (returnval){ [ 638.802625] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52de5c7d-2083-9a48-6859-c3e491b4fd8f" [ 638.802625] env[68798]: _type = "Task" [ 638.802625] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.809930] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52de5c7d-2083-9a48-6859-c3e491b4fd8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.977813] env[68798]: DEBUG nova.compute.manager [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Received event network-vif-plugged-0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 638.977813] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] Acquiring lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.977813] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] Lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.978378] env[68798]: DEBUG oslo_concurrency.lockutils [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] Lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.978718] env[68798]: DEBUG nova.compute.manager [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] No waiting events found dispatching network-vif-plugged-0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 638.979129] env[68798]: WARNING nova.compute.manager [req-2d9d54b4-d2d9-4c6c-8d51-d5cc752f7f8f req-d3b7b7ee-633b-4d56-b51b-0518aa2ebff5 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Received unexpected event network-vif-plugged-0eefdba3-83dd-47f8-9031-8d04a26b999e for instance with vm_state building and task_state spawning. 
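
The "Waiting for the task: (returnval){ value = "task-…" }" and "progress is 0% … completed successfully" lines above are oslo.vmware's task poller at work: an asynchronous vSphere *_Task method is invoked, then polled until vCenter reports completion. A minimal sketch of that invoke-then-wait pattern follows, assuming oslo.vmware is available; the host name, credentials, and managed-object references are placeholders and the helper name is illustrative rather than Nova's vm_util.

from oslo_vmware import api


def create_vm(session, vm_folder, config_spec, res_pool):
    """Start CreateVM_Task and block until the poller reports completion,
    which produces the 'progress is N%' / 'completed successfully' lines."""
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    task_info = session.wait_for_task(task)
    return task_info.result  # managed-object reference of the new VM


if __name__ == '__main__':
    # Opening the session performs SessionManager.Login, as in the trace;
    # a real run obviously needs a reachable vCenter and valid credentials.
    session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

The same wait_for_task() call backs the SearchDatastore_Task polling seen when the image cache is checked, which is why both task types log through the identical _poll_task code path.
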
[ 639.314062] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 639.314346] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.314623] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.415993] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.415993] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 639.859012] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "551707b9-118e-45c8-a28f-e70486272f6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 639.859212] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.926121] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Received event network-changed-f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 640.926121] env[68798]: DEBUG nova.compute.manager 
[req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Refreshing instance network info cache due to event network-changed-f2ecc2c6-a72c-410f-bfc6-21b12846808d. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 640.927070] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquiring lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.927384] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquired lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.928708] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Refreshing network info cache for port f2ecc2c6-a72c-410f-bfc6-21b12846808d {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 641.304511] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "89f660c8-6efd-4789-90ee-67e42abc1db7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.304739] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.895823] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Updated VIF entry in instance network info cache for port f2ecc2c6-a72c-410f-bfc6-21b12846808d. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 641.896140] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Updating instance_info_cache with network_info: [{"id": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "address": "fa:16:3e:9c:81:fa", "network": {"id": "23728427-d693-4717-b743-93f76965b932", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-573643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dadfddb85ecc42e78957ff79ec19aa44", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2ecc2c6-a7", "ovs_interfaceid": "f2ecc2c6-a72c-410f-bfc6-21b12846808d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.916368] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Releasing lock "refresh_cache-620ef3f6-0444-474d-8179-3dc0143f2e99" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.916368] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Received event network-changed-0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 641.916368] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Refreshing instance network info cache due to event network-changed-0eefdba3-83dd-47f8-9031-8d04a26b999e. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 641.916368] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquiring lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 641.916506] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquired lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.916593] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Refreshing network info cache for port 0eefdba3-83dd-47f8-9031-8d04a26b999e {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.419761] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Updated VIF entry in instance network info cache for port 0eefdba3-83dd-47f8-9031-8d04a26b999e. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 642.420228] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Updating instance_info_cache with network_info: [{"id": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "address": "fa:16:3e:55:aa:55", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.169", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0eefdba3-83", "ovs_interfaceid": "0eefdba3-83dd-47f8-9031-8d04a26b999e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.438517] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Releasing lock "refresh_cache-e060aaea-7508-46ed-8786-b5753fde75e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 642.438781] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Received event 
network-vif-plugged-4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 642.439031] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquiring lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.439393] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.439475] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.439608] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] No waiting events found dispatching network-vif-plugged-4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 642.440575] env[68798]: WARNING nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Received unexpected event network-vif-plugged-4d6dc99a-fba7-4735-a0b3-0e40cf630d94 for instance with vm_state building and task_state spawning. [ 642.441339] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Received event network-changed-4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 642.441339] env[68798]: DEBUG nova.compute.manager [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Refreshing instance network info cache due to event network-changed-4d6dc99a-fba7-4735-a0b3-0e40cf630d94. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 642.441339] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquiring lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.441339] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Acquired lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.441539] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Refreshing network info cache for port 4d6dc99a-fba7-4735-a0b3-0e40cf630d94 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 642.972299] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Updated VIF entry in instance network info cache for port 4d6dc99a-fba7-4735-a0b3-0e40cf630d94. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 642.972299] env[68798]: DEBUG nova.network.neutron [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Updating instance_info_cache with network_info: [{"id": "4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "address": "fa:16:3e:f0:8c:d4", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.254", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d6dc99a-fb", "ovs_interfaceid": "4d6dc99a-fba7-4735-a0b3-0e40cf630d94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.994026] env[68798]: DEBUG oslo_concurrency.lockutils [req-5ca3de93-098e-4638-9691-dcd7ec2ed9ac req-4a5f559b-58fc-4971-9816-b1df9e0072b0 service nova] Releasing lock "refresh_cache-1ecf18e5-a4a1-4efb-b54a-964b064b51e5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.312540] env[68798]: DEBUG oslo_concurrency.lockutils [None req-29280c3d-cde0-46b4-afb5-64b459be9426 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "2fc5f330-2a23-4a67-a49c-c4985928417b" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.312772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-29280c3d-cde0-46b4-afb5-64b459be9426 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "2fc5f330-2a23-4a67-a49c-c4985928417b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.324647] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b880fb0-15c7-4635-b80f-2f0f02006412 tempest-VolumesAssistedSnapshotsTest-1167727799 tempest-VolumesAssistedSnapshotsTest-1167727799-project-member] Acquiring lock "9d533b44-6afa-4c5b-a0a6-90ff442f7771" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.324873] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b880fb0-15c7-4635-b80f-2f0f02006412 tempest-VolumesAssistedSnapshotsTest-1167727799 tempest-VolumesAssistedSnapshotsTest-1167727799-project-member] Lock "9d533b44-6afa-4c5b-a0a6-90ff442f7771" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.247396] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f2df0b5d-ddcf-4bd9-bfef-00516721b11d tempest-ServersWithSpecificFlavorTestJSON-1621209597 tempest-ServersWithSpecificFlavorTestJSON-1621209597-project-member] Acquiring lock "ad71bb4b-829d-4297-857f-249e4c499623" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.247396] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f2df0b5d-ddcf-4bd9-bfef-00516721b11d tempest-ServersWithSpecificFlavorTestJSON-1621209597 tempest-ServersWithSpecificFlavorTestJSON-1621209597-project-member] Lock "ad71bb4b-829d-4297-857f-249e4c499623" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.883460] env[68798]: DEBUG oslo_concurrency.lockutils [None req-924772f1-c2a1-404b-ba6e-fe6ee1238c10 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669-project-member] Acquiring lock "9778caa6-4d64-483e-9b72-e82c9977f9ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.883637] env[68798]: DEBUG oslo_concurrency.lockutils [None req-924772f1-c2a1-404b-ba6e-fe6ee1238c10 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669-project-member] Lock "9778caa6-4d64-483e-9b72-e82c9977f9ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.556118] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Acquiring lock "6a79f513-a8f0-4ceb-b4fd-466474dd67c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.556118] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "6a79f513-a8f0-4ceb-b4fd-466474dd67c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.589126] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Acquiring lock "ced820f2-8c09-4000-8f46-e0c5909e5b2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.589418] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "ced820f2-8c09-4000-8f46-e0c5909e5b2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.637942] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Acquiring lock "9c094711-2653-4a44-a4b0-020621beb4fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.638423] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "9c094711-2653-4a44-a4b0-020621beb4fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.945296] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c96d0001-2c58-4a6e-ad48-e080b6f1e5b8 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "092cc50b-edcc-4d9b-a981-a06e2eaaf321" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.945296] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-c96d0001-2c58-4a6e-ad48-e080b6f1e5b8 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "092cc50b-edcc-4d9b-a981-a06e2eaaf321" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.501535] env[68798]: WARNING oslo_vmware.rw_handles [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 652.501535] env[68798]: ERROR oslo_vmware.rw_handles [ 652.502310] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 652.503984] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 652.503984] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Copying Virtual Disk [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/7effcf7b-5dc7-4a40-9709-2c080d548ed8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 652.504199] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27039e49-9b66-4084-a6f9-d5c44cadea87 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.517039] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.517039] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.517039] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for the task: (returnval){ [ 652.517039] env[68798]: value = "task-4217537" [ 652.517039] env[68798]: _type = "Task" [ 652.517039] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.529081] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Task: {'id': task-4217537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.031093] env[68798]: DEBUG oslo_vmware.exceptions [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 653.031659] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.036565] env[68798]: ERROR nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 653.036565] env[68798]: Faults: ['InvalidArgument'] [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Traceback (most recent call last): [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] yield resources [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self.driver.spawn(context, instance, image_meta, [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self._fetch_image_if_missing(context, vi) [ 653.036565] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] image_cache(vi, tmp_image_ds_loc) [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] vm_util.copy_virtual_disk( [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] session._wait_for_task(vmdk_copy_task) [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return self.wait_for_task(task_ref) [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return evt.wait() [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] result = hub.switch() [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 653.037060] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return self.greenlet.switch() [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self.f(*self.args, **self.kw) [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] raise exceptions.translate_fault(task_info.error) [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Faults: ['InvalidArgument'] [ 653.037429] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] [ 653.039374] env[68798]: INFO nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Terminating instance [ 653.042131] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.042131] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.043472] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock 
"refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.043811] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquired lock "refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.044126] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 653.048453] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-985ab011-c1c9-4eec-9519-0e9aa2e95840 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.063147] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.063279] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 653.064542] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bfb250c-15d1-4c15-a001-866b6920162a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.071072] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 653.071072] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5271adb7-eda3-739b-de04-6d358a166907" [ 653.071072] env[68798]: _type = "Task" [ 653.071072] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.082742] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5271adb7-eda3-739b-de04-6d358a166907, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.093723] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 653.277963] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.289902] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Releasing lock "refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.290343] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 653.290606] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 653.291754] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674cd511-df27-40ec-8522-889e44ab0161 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.302018] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 653.302018] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b3b03a4-7377-4267-8a79-229ea540e514 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.385806] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 653.386069] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 653.386460] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Deleting the datastore file [datastore1] 8b5b6312-25fa-4eee-b951-88457b8e4fad {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 653.386939] env[68798]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48268be7-1002-4462-a2d4-9f32ce5b4c1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.398777] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for the task: (returnval){ [ 653.398777] env[68798]: value = "task-4217539" [ 653.398777] env[68798]: _type = "Task" [ 653.398777] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.407987] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Task: {'id': task-4217539, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.581883] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 653.582204] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating directory with path [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.582767] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-924700bd-a802-4011-b734-c93532f9ca3f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.595901] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created directory with path [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.596131] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Fetch image to [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 653.596306] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 653.597123] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d44aefb-fbbd-415c-b444-146cfecd985f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.605954] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f2383e-159e-49e2-8048-7b1ebb3148ab {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.616891] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad3ee0c-0642-4c0a-b712-0c8b15afddf4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.653344] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd83506-6735-41af-8226-ea0eb33177ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.659526] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eed7ff8c-c93e-41e1-8c1c-1a310152cca9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.684774] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 653.744177] env[68798]: DEBUG oslo_vmware.rw_handles [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 653.805152] env[68798]: DEBUG oslo_vmware.rw_handles [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 653.805419] env[68798]: DEBUG oslo_vmware.rw_handles [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 653.909534] env[68798]: DEBUG oslo_vmware.api [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Task: {'id': task-4217539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042372} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.909788] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 653.910133] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 653.910647] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 653.910892] env[68798]: INFO nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Took 0.62 seconds to destroy the instance on the hypervisor. [ 653.911268] env[68798]: DEBUG oslo.service.loopingcall [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.911509] env[68798]: DEBUG nova.compute.manager [-] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 653.916985] env[68798]: DEBUG nova.compute.claims [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 653.917177] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.917395] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.080663] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a5b3e2ba-4f09-41b7-a739-25a335806028 tempest-ServersTestFqdnHostnames-667902312 tempest-ServersTestFqdnHostnames-667902312-project-member] Acquiring lock "e1925875-04db-4b88-ad54-d33ff804d365" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.080663] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a5b3e2ba-4f09-41b7-a739-25a335806028 tempest-ServersTestFqdnHostnames-667902312 tempest-ServersTestFqdnHostnames-667902312-project-member] Lock "e1925875-04db-4b88-ad54-d33ff804d365" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.410077] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728ea38b-df31-4f9c-8a78-913ebe1eedd7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.418308] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd618a6a-7a32-4563-8506-6d3ffeb64a32 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.451199] env[68798]: DEBUG oslo_concurrency.lockutils [None req-de2efd34-c38e-4cea-a2b4-3595e086abdc tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Acquiring lock "e3dfc177-6f11-48e4-bbac-83bda39fbb8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.451513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-de2efd34-c38e-4cea-a2b4-3595e086abdc tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Lock "e3dfc177-6f11-48e4-bbac-83bda39fbb8d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.452519] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b1ed6c-a0ab-4c27-a089-7c860d60f8f3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.461385] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0829c2ff-acf0-4f1d-bec9-5541230e3799 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.478032] env[68798]: DEBUG nova.compute.provider_tree [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.488322] env[68798]: DEBUG nova.scheduler.client.report [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 654.508014] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.590s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.508554] env[68798]: ERROR nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.508554] env[68798]: Faults: ['InvalidArgument'] [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Traceback (most recent call last): [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self.driver.spawn(context, instance, image_meta, [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 
654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self._fetch_image_if_missing(context, vi) [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] image_cache(vi, tmp_image_ds_loc) [ 654.508554] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] vm_util.copy_virtual_disk( [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] session._wait_for_task(vmdk_copy_task) [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return self.wait_for_task(task_ref) [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return evt.wait() [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] result = hub.switch() [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] return self.greenlet.switch() [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 654.508952] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] self.f(*self.args, **self.kw) [ 654.509504] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 654.509504] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] raise exceptions.translate_fault(task_info.error) [ 654.509504] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.509504] env[68798]: ERROR nova.compute.manager [instance: 
8b5b6312-25fa-4eee-b951-88457b8e4fad] Faults: ['InvalidArgument'] [ 654.509504] env[68798]: ERROR nova.compute.manager [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] [ 654.509504] env[68798]: DEBUG nova.compute.utils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 654.513781] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Build of instance 8b5b6312-25fa-4eee-b951-88457b8e4fad was re-scheduled: A specified parameter was not correct: fileType [ 654.513781] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 654.513947] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 654.514202] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquiring lock "refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.514542] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Acquired lock "refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.514542] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.551362] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.682211] env[68798]: DEBUG nova.network.neutron [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.694046] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Releasing lock "refresh_cache-8b5b6312-25fa-4eee-b951-88457b8e4fad" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.694301] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 654.694488] env[68798]: DEBUG nova.compute.manager [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] [instance: 8b5b6312-25fa-4eee-b951-88457b8e4fad] Skipping network deallocation for instance since networking was not requested. {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 654.818643] env[68798]: INFO nova.scheduler.client.report [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Deleted allocations for instance 8b5b6312-25fa-4eee-b951-88457b8e4fad [ 654.856724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf8871c2-4385-47cd-9eb8-1ef54437a6ea tempest-ServersAdmin275Test-413091161 tempest-ServersAdmin275Test-413091161-project-member] Lock "8b5b6312-25fa-4eee-b951-88457b8e4fad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.112s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.890022] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 654.951718] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.954024] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.954024] env[68798]: INFO nova.compute.claims [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.446715] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c575d811-d21f-4f45-9a56-0a15673ba685 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.455252] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2205155-ae44-4cd4-8db0-bdb4627952ed {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.493900] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da71cfa9-2c74-4cb9-9f20-753eb256b105 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.502587] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5c92b1-980d-4b55-b2ed-433d9eb2506f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.517658] env[68798]: DEBUG nova.compute.provider_tree [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.528795] env[68798]: DEBUG nova.scheduler.client.report [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 655.543671] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.591s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.544163] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 655.595163] env[68798]: DEBUG nova.compute.utils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 655.596490] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 655.596657] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 655.608426] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 655.687832] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 655.723424] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 655.723962] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 655.724257] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.724598] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 655.724866] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.725141] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 655.725533] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 655.725813] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 655.726133] env[68798]: DEBUG 
nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 655.726547] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 655.726841] env[68798]: DEBUG nova.virt.hardware [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 655.728152] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e76780d-e9ec-4c8e-b07a-5eeea950a36e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.744027] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359d8c17-72f7-40c8-8435-7d8464b68ff2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.890499] env[68798]: DEBUG nova.policy [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4bab77f0fc81476490d08751215e0ad7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7bd52d5dfbd4557b59e358710d477dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 656.735708] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Successfully created port: 5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.922426] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5be94f04-1b87-4fbd-8c2f-a5eda8a61397 tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Acquiring lock "ceb21573-1cb0-4af6-9897-f164997b4b5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.922698] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5be94f04-1b87-4fbd-8c2f-a5eda8a61397 tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Lock "ceb21573-1cb0-4af6-9897-f164997b4b5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.018510] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Successfully updated port: 5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 659.039705] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.040229] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquired lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.040483] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.407880] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.027482] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Updating instance_info_cache with network_info: [{"id": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "address": "fa:16:3e:fe:59:d3", "network": {"id": "ba6fcc53-7939-4921-bcbf-51377bd59511", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-55267586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7bd52d5dfbd4557b59e358710d477dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e068c7e-b5", "ovs_interfaceid": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.043579] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Releasing lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.043579] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance network_info: |[{"id": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "address": "fa:16:3e:fe:59:d3", "network": {"id": "ba6fcc53-7939-4921-bcbf-51377bd59511", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-55267586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7bd52d5dfbd4557b59e358710d477dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e068c7e-b5", "ovs_interfaceid": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 660.043898] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:59:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a27fd90b-16a5-43af-bede-ae36762ece00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e068c7e-b5d6-453e-a289-3d2eb28104a5', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.059429] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Creating folder: Project (d7bd52d5dfbd4557b59e358710d477dd). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.059977] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ebc2c1d-a431-4582-8bb5-3f1a7e20b8ef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.074820] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Created folder: Project (d7bd52d5dfbd4557b59e358710d477dd) in parent group-v834492. [ 660.075068] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Creating folder: Instances. Parent ref: group-v834523. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 660.075349] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4518cfd-e61d-4845-8b05-14c34362c8ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.089143] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Created folder: Instances in parent group-v834523. [ 660.089143] env[68798]: DEBUG oslo.service.loopingcall [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.089143] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 660.089143] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-643d9496-bd0c-44cc-81dd-1b5ec36248f4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.114293] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.114293] env[68798]: value = "task-4217542" [ 660.114293] env[68798]: _type = "Task" [ 660.114293] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.123449] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217542, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.631100] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217542, 'name': CreateVM_Task, 'duration_secs': 0.317015} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.631307] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 660.632064] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.632271] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.632599] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 660.632946] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94d4d636-ea16-4358-a064-12b9582b2962 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.638131] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for the task: (returnval){ [ 660.638131] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52cb2f18-8012-abaa-44ac-91a07db5eeaa" [ 660.638131] env[68798]: _type = "Task" [ 660.638131] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.647828] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52cb2f18-8012-abaa-44ac-91a07db5eeaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.090032] env[68798]: DEBUG nova.compute.manager [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Received event network-vif-plugged-5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 661.090032] env[68798]: DEBUG oslo_concurrency.lockutils [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] Acquiring lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.090032] env[68798]: DEBUG oslo_concurrency.lockutils [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.090032] env[68798]: DEBUG oslo_concurrency.lockutils [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.090803] env[68798]: DEBUG nova.compute.manager [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] No waiting events found dispatching network-vif-plugged-5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 661.090803] env[68798]: WARNING nova.compute.manager [req-bacbaf31-9e1c-4e12-b666-4498bb9caae8 req-921de322-b5b0-4891-90c6-78c24768d5cc service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Received unexpected event network-vif-plugged-5e068c7e-b5d6-453e-a289-3d2eb28104a5 for instance with vm_state building and task_state spawning. 
[ 661.152692] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.154118] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.154118] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.339727] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4865a6e-bb80-4941-ab90-c6dc9d7d36a7 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "10442eb5-8f1a-4b4c-9aab-78605de8dfea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.340356] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4865a6e-bb80-4941-ab90-c6dc9d7d36a7 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "10442eb5-8f1a-4b4c-9aab-78605de8dfea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.894549] env[68798]: DEBUG oslo_concurrency.lockutils [None req-baf1c83b-91d2-42eb-ac70-24ad6b8bed7d tempest-ServerAddressesTestJSON-68008023 tempest-ServerAddressesTestJSON-68008023-project-member] Acquiring lock "0d4e5325-aa25-4766-a490-9719b3f354c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.894787] env[68798]: DEBUG oslo_concurrency.lockutils [None req-baf1c83b-91d2-42eb-ac70-24ad6b8bed7d tempest-ServerAddressesTestJSON-68008023 tempest-ServerAddressesTestJSON-68008023-project-member] Lock "0d4e5325-aa25-4766-a490-9719b3f354c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.298520] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1669284c-a9fe-4ca6-a9b9-4343d6e97bae tempest-ServerShowV257Test-1462755352 tempest-ServerShowV257Test-1462755352-project-member] Acquiring lock "0913eee8-aa9a-4e1b-8aec-48d8a9197530" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.298748] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1669284c-a9fe-4ca6-a9b9-4343d6e97bae tempest-ServerShowV257Test-1462755352 tempest-ServerShowV257Test-1462755352-project-member] Lock "0913eee8-aa9a-4e1b-8aec-48d8a9197530" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.301080] env[68798]: DEBUG oslo_concurrency.lockutils [None req-019f7356-5703-4475-afba-382fd1528c12 tempest-ServersListShow296Test-1039278903 tempest-ServersListShow296Test-1039278903-project-member] Acquiring lock "c59a14e2-3655-4177-961a-34552be1ccb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.302035] env[68798]: DEBUG oslo_concurrency.lockutils [None req-019f7356-5703-4475-afba-382fd1528c12 tempest-ServersListShow296Test-1039278903 tempest-ServersListShow296Test-1039278903-project-member] Lock "c59a14e2-3655-4177-961a-34552be1ccb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.985721] env[68798]: DEBUG nova.compute.manager [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Received event network-changed-5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 663.985994] env[68798]: DEBUG nova.compute.manager [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Refreshing instance network info cache due to event network-changed-5e068c7e-b5d6-453e-a289-3d2eb28104a5. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 663.986142] env[68798]: DEBUG oslo_concurrency.lockutils [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] Acquiring lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.986290] env[68798]: DEBUG oslo_concurrency.lockutils [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] Acquired lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.986451] env[68798]: DEBUG nova.network.neutron [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Refreshing network info cache for port 5e068c7e-b5d6-453e-a289-3d2eb28104a5 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 664.420529] env[68798]: DEBUG nova.network.neutron [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Updated VIF entry in instance network info cache for port 5e068c7e-b5d6-453e-a289-3d2eb28104a5. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 664.421052] env[68798]: DEBUG nova.network.neutron [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Updating instance_info_cache with network_info: [{"id": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "address": "fa:16:3e:fe:59:d3", "network": {"id": "ba6fcc53-7939-4921-bcbf-51377bd59511", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-55267586-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d7bd52d5dfbd4557b59e358710d477dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a27fd90b-16a5-43af-bede-ae36762ece00", "external-id": "nsx-vlan-transportzone-197", "segmentation_id": 197, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e068c7e-b5", "ovs_interfaceid": "5e068c7e-b5d6-453e-a289-3d2eb28104a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.431215] env[68798]: DEBUG oslo_concurrency.lockutils [req-391ca5c0-2b44-4f28-afb1-ed8cd8d085c4 req-4e0ae3dc-b7ab-4a2b-a26f-c39df4bb412d service nova] Releasing lock "refresh_cache-3bf7d713-8315-48d9-85dd-4ff09c9c7782" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.543449] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.570943] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.572397] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 674.572397] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 674.597204] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: df45083e-ece4-4768-bc08-022fe0185117] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.597378] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.597510] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 748df295-743b-41be-b873-523b688f2c78] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.598446] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.598768] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599011] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599092] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599178] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599295] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599415] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 674.599535] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 674.600016] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 674.600399] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.047973] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.048348] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 675.390116] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf76ca76-2f97-4be1-9463-b5a82a152b2e tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Acquiring lock "6cf752ef-a49e-477f-8297-59621e69e712" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.390356] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf76ca76-2f97-4be1-9463-b5a82a152b2e tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Lock "6cf752ef-a49e-477f-8297-59621e69e712" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.048731] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.049071] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task 
ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.049272] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.049520] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 676.049692] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 676.072555] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.072824] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.073018] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.073184] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 676.074615] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ffa5ea-cc2b-43d5-b276-6a66fe559847 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.085118] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74598824-fd3e-4d34-a382-54d5de6e58da {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.101458] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f2e9fe-5b68-4e85-ba02-fc96558d41b9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.112023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b5db3f-8e9e-45ff-936a-6b6e6145f864 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.139543] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180743MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 676.139692] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.139918] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.234145] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance df45083e-ece4-4768-bc08-022fe0185117 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234314] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 4995301f-e3c3-4032-adf0-7cffa3497d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234440] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 748df295-743b-41be-b873-523b688f2c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234560] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234680] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234796] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 90d9df19-2d93-4543-a650-4a624f505d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.234911] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.235035] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.235152] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.235262] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 676.268733] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.307019] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.321232] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.342585] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2fc5f330-2a23-4a67-a49c-c4985928417b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.362679] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9d533b44-6afa-4c5b-a0a6-90ff442f7771 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.383686] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ad71bb4b-829d-4297-857f-249e4c499623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.398459] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9778caa6-4d64-483e-9b72-e82c9977f9ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.419217] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6a79f513-a8f0-4ceb-b4fd-466474dd67c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.436914] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ced820f2-8c09-4000-8f46-e0c5909e5b2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.453032] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9c094711-2653-4a44-a4b0-020621beb4fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.481037] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 092cc50b-edcc-4d9b-a981-a06e2eaaf321 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.498869] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.535035] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e1925875-04db-4b88-ad54-d33ff804d365 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.550549] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e3dfc177-6f11-48e4-bbac-83bda39fbb8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.575526] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ceb21573-1cb0-4af6-9897-f164997b4b5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.599101] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 10442eb5-8f1a-4b4c-9aab-78605de8dfea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.618793] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0d4e5325-aa25-4766-a490-9719b3f354c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.631761] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0913eee8-aa9a-4e1b-8aec-48d8a9197530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.646627] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c59a14e2-3655-4177-961a-34552be1ccb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.661334] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf752ef-a49e-477f-8297-59621e69e712 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 676.662098] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 676.662474] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 677.201038] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b30f71d-284a-4ead-8446-ae66e0579a3b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.208891] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d58e2fb-a82b-413b-ac38-792836a17c7d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.244023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f535b5-ada1-4b31-891c-cb674ee998f7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.250924] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d30279-e3ac-4393-a7b4-70cf5b53cd7e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.264973] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 677.274667] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 677.291310] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 677.291533] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.152s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.959578] env[68798]: DEBUG oslo_concurrency.lockutils [None req-021d9b7c-2e90-4e9a-8f4b-903cba2310e6 tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Acquiring lock "2afc987a-45cc-484e-a6f0-a0118e2e73eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.959869] env[68798]: DEBUG oslo_concurrency.lockutils [None req-021d9b7c-2e90-4e9a-8f4b-903cba2310e6 tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "2afc987a-45cc-484e-a6f0-a0118e2e73eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.104452] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1bd55f57-e1a5-490d-b901-1648df88fc08 tempest-AttachInterfacesV270Test-1172441625 tempest-AttachInterfacesV270Test-1172441625-project-member] Acquiring lock "8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.105058] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1bd55f57-e1a5-490d-b901-1648df88fc08 tempest-AttachInterfacesV270Test-1172441625 tempest-AttachInterfacesV270Test-1172441625-project-member] Lock "8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.472884] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e7e3b441-781c-4adc-966a-a0d06a712e63 tempest-ServersTestBootFromVolume-2010485395 tempest-ServersTestBootFromVolume-2010485395-project-member] Acquiring lock "95cffda2-8119-4401-ac53-9d7042ef7180" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 693.473192] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e7e3b441-781c-4adc-966a-a0d06a712e63 tempest-ServersTestBootFromVolume-2010485395 
tempest-ServersTestBootFromVolume-2010485395-project-member] Lock "95cffda2-8119-4401-ac53-9d7042ef7180" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.396216] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "1752a71d-8cc7-4f88-b097-53094df226e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.396670] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.363113] env[68798]: WARNING oslo_vmware.rw_handles [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 700.363113] env[68798]: ERROR oslo_vmware.rw_handles [ 700.363554] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 700.365090] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: 
df45083e-ece4-4768-bc08-022fe0185117] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 700.365360] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Copying Virtual Disk [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/01d4692a-dbad-45b9-a051-7263cf934a90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 700.365660] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7f693b2-2171-4d3d-9788-48cabc55f191 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.374112] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 700.374112] env[68798]: value = "task-4217554" [ 700.374112] env[68798]: _type = "Task" [ 700.374112] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.383555] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': task-4217554, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.886491] env[68798]: DEBUG oslo_vmware.exceptions [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 700.886491] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.887896] env[68798]: ERROR nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 700.887896] env[68798]: Faults: ['InvalidArgument'] [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] Traceback (most recent call last): [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] yield resources [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self.driver.spawn(context, instance, image_meta, [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self._vmops.spawn(context, instance, image_meta, injected_files, [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self._fetch_image_if_missing(context, vi) [ 700.887896] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] image_cache(vi, tmp_image_ds_loc) [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] vm_util.copy_virtual_disk( [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] session._wait_for_task(vmdk_copy_task) [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return self.wait_for_task(task_ref) [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return evt.wait() [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] result = hub.switch() [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 700.888263] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return self.greenlet.switch() [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self.f(*self.args, **self.kw) [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] raise exceptions.translate_fault(task_info.error) [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] Faults: ['InvalidArgument'] [ 700.888580] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] [ 700.888580] env[68798]: INFO nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Terminating instance [ 700.888897] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.889153] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.889413] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4a9c358-69ed-4a50-9a98-181889925f60 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.891730] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 700.892009] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 700.892804] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f20030-a439-4ac6-8d69-e75d8afdd3dd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.900706] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 700.900934] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5103d5b2-5919-47b2-be39-162ffa1a1e4e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.903471] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.903651] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 700.904663] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da188e0d-4e4e-4df8-89b4-076889c16658 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.910574] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 700.910574] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5257c0c8-0d04-5da9-9b1c-3651c300d0b0" [ 700.910574] env[68798]: _type = "Task" [ 700.910574] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.919012] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5257c0c8-0d04-5da9-9b1c-3651c300d0b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.979585] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 700.980075] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 700.980075] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleting the datastore file [datastore1] df45083e-ece4-4768-bc08-022fe0185117 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.980269] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ec00b93-a086-4495-9ac2-275ff4e1f4a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.988666] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 700.988666] env[68798]: value = "task-4217556" [ 700.988666] env[68798]: _type = "Task" [ 700.988666] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.997200] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': task-4217556, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.421212] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 701.421479] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.421718] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a534813-18c5-4c49-911e-b58a570a1ec9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.433905] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.434107] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Fetch image to [datastore1] vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 701.434285] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 701.435044] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92b8d54-59f2-4b0b-8230-cea10be6ef1a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.441970] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f76756-eea3-4f07-a9f3-6f7e89f467ae {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.451412] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2216db-12a1-49ad-a0c7-2b277565ed14 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.483293] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992e7671-ac0d-4533-a1d0-0bfa336af3ff {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.492503] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-549fcb47-2d78-401a-9acd-4994019f29a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.500539] env[68798]: DEBUG oslo_vmware.api [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': task-4217556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09259} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.500884] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.501125] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 701.501385] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 701.501568] env[68798]: INFO nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 701.503934] env[68798]: DEBUG nova.compute.claims [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 701.504128] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.504352] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.517789] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 701.574289] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 701.637020] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 701.637020] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 702.020785] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77bfe81-5757-4fa8-b45c-6fbac3cb91de {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.028621] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66299f4d-a03b-42da-8ffe-8b47446b3155 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.060170] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012cabc6-010d-480b-ad53-c67f56ce9fb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.069134] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c97f9e1-4db3-49d2-a247-08793ce750f3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.083972] env[68798]: DEBUG nova.compute.provider_tree [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.093054] env[68798]: DEBUG nova.scheduler.client.report [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.114494] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.610s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.114836] env[68798]: ERROR nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 702.114836] env[68798]: Faults: ['InvalidArgument'] [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] Traceback (most recent call last): [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self.driver.spawn(context, instance, image_meta, [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self._fetch_image_if_missing(context, vi) [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] image_cache(vi, tmp_image_ds_loc) [ 702.114836] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] vm_util.copy_virtual_disk( [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] session._wait_for_task(vmdk_copy_task) [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return self.wait_for_task(task_ref) [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return evt.wait() [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] result = hub.switch() [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] return self.greenlet.switch() [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 702.115234] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] self.f(*self.args, **self.kw) [ 702.115567] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 702.115567] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] raise exceptions.translate_fault(task_info.error) [ 702.115567] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 702.115567] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] Faults: ['InvalidArgument'] [ 702.115567] env[68798]: ERROR nova.compute.manager [instance: df45083e-ece4-4768-bc08-022fe0185117] [ 702.115567] env[68798]: DEBUG nova.compute.utils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 702.117041] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Build of instance df45083e-ece4-4768-bc08-022fe0185117 was re-scheduled: A specified parameter was not correct: fileType [ 702.117041] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 702.117409] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 702.117578] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 702.117752] env[68798]: DEBUG nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 702.117924] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 702.533405] env[68798]: DEBUG nova.network.neutron [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.550230] env[68798]: INFO nova.compute.manager [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: df45083e-ece4-4768-bc08-022fe0185117] Took 0.43 seconds to deallocate network for instance. [ 702.674514] env[68798]: INFO nova.scheduler.client.report [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleted allocations for instance df45083e-ece4-4768-bc08-022fe0185117 [ 702.695207] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c5c0c25-b10f-4e4f-9f34-927aca811870 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "df45083e-ece4-4768-bc08-022fe0185117" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.725s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.710242] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 702.771104] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.771104] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.771104] env[68798]: INFO nova.compute.claims [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.246728] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb3167c-7b39-44a1-8a7b-8daa87785cf3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.254435] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb61d90c-779a-4e7e-ad70-b71d8261e6b1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.285641] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161d218d-4bbd-4fd9-8033-f040ede7e551 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.294373] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0873887d-a4b5-4a65-883a-f97716cad2cf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.307789] env[68798]: DEBUG nova.compute.provider_tree [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.316322] env[68798]: DEBUG nova.scheduler.client.report [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 703.333335] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.565s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.333848] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 703.370770] env[68798]: DEBUG nova.compute.utils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.372361] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 703.372536] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.381886] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 703.450552] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 703.477992] env[68798]: DEBUG nova.policy [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6d452ac7a1d14f1bab6132748c72ef05', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0888c5e1b4a48b79726278612a5e53d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 703.482796] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 703.483028] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 703.483193] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.487019] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 703.487019] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.487019] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 703.487019] env[68798]: DEBUG nova.virt.hardware [None 
req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 703.487019] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 703.487450] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 703.487450] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 703.487450] env[68798]: DEBUG nova.virt.hardware [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 703.487450] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6dc936-225f-4556-be57-d7d420499903 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.493731] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ec1793-5668-4fb7-a6f2-7ca23f54aa95 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.927657] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Successfully created port: 878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 704.218985] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.218985] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.038269] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Successfully updated port: 878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.052862] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.053057] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquired lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.053238] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.127080] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.146151] env[68798]: DEBUG nova.compute.manager [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Received event network-vif-plugged-878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 705.146151] env[68798]: DEBUG oslo_concurrency.lockutils [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] Acquiring lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.146151] env[68798]: DEBUG oslo_concurrency.lockutils [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.146151] env[68798]: DEBUG oslo_concurrency.lockutils [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.146287] env[68798]: DEBUG nova.compute.manager [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] No waiting events found dispatching network-vif-plugged-878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 705.146287] env[68798]: WARNING nova.compute.manager [req-9db62b13-bd3e-487e-bab2-545c4585524b req-1aa6d52c-b3c4-48ae-9c64-98daae1e998d service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Received unexpected event network-vif-plugged-878018ef-f45b-4972-bfce-617a6e56a46d for instance with vm_state building and task_state spawning. 
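The "Acquiring lock …", "Lock … acquired", and "Lock … released" entries above (for "cdb141da-a05c-4891-a33d-6e12eafe4f22-events", "compute_resources", and "refresh_cache-<uuid>") are emitted by oslo.concurrency's named-lock helpers. A minimal sketch of that serialization pattern, assuming hypothetical function names (pop_event, refresh_cache, rebuild) rather than Nova's actual source:

from oslo_concurrency import lockutils

# Decorator form: every call serializes on the named lock, analogous to the
# "<instance-uuid>-events" and "compute_resources" locks in the entries above.
@lockutils.synchronized('cdb141da-a05c-4891-a33d-6e12eafe4f22-events')
def pop_event(pending_events, name):
    # Remove and return a pending external event, if any, under the lock.
    return pending_events.pop(name, None)

# Context-manager form, used when only part of a function is the critical
# section (compare the "refresh_cache-<uuid>" acquire/release pairs above).
def refresh_cache(instance_uuid, rebuild):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return rebuild(instance_uuid)

With debug logging enabled, these helpers produce the acquire/release DEBUG messages seen in the surrounding entries; the decorator's inner wrapper additionally reports the waited/held durations.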
[ 705.341368] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Updating instance_info_cache with network_info: [{"id": "878018ef-f45b-4972-bfce-617a6e56a46d", "address": "fa:16:3e:0c:b5:84", "network": {"id": "12716be4-cab4-4ea9-8845-46c27236beb5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1751665085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a0888c5e1b4a48b79726278612a5e53d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap878018ef-f4", "ovs_interfaceid": "878018ef-f45b-4972-bfce-617a6e56a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.355645] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Releasing lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.355962] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance network_info: |[{"id": "878018ef-f45b-4972-bfce-617a6e56a46d", "address": "fa:16:3e:0c:b5:84", "network": {"id": "12716be4-cab4-4ea9-8845-46c27236beb5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1751665085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a0888c5e1b4a48b79726278612a5e53d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap878018ef-f4", "ovs_interfaceid": "878018ef-f45b-4972-bfce-617a6e56a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.356395] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b 
tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:b5:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '878018ef-f45b-4972-bfce-617a6e56a46d', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.364146] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Creating folder: Project (a0888c5e1b4a48b79726278612a5e53d). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 705.364744] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ba70148-824e-484a-8d57-9aa1af300fa1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.378018] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Created folder: Project (a0888c5e1b4a48b79726278612a5e53d) in parent group-v834492. [ 705.378018] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Creating folder: Instances. Parent ref: group-v834530. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 705.378422] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3607e10-e36e-4901-bd66-7cfc37dde266 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.394025] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Created folder: Instances in parent group-v834530. [ 705.394025] env[68798]: DEBUG oslo.service.loopingcall [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 705.394025] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 705.394025] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20472ef2-3fd3-47c9-a01d-a70912017370 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.418851] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.418851] env[68798]: value = "task-4217559" [ 705.418851] env[68798]: _type = "Task" [ 705.418851] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.426750] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217559, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.929695] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217559, 'name': CreateVM_Task, 'duration_secs': 0.308108} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.929927] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 705.930593] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.930787] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.931123] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 705.931393] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f3d070-efc4-464c-9ded-d984c7bbeafd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.936876] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for the task: (returnval){ [ 705.936876] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5260df32-9486-1144-6172-ee6026a79121" [ 705.936876] env[68798]: _type = "Task" [ 705.936876] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.946263] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5260df32-9486-1144-6172-ee6026a79121, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.447591] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.447876] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.448064] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.176563] env[68798]: DEBUG nova.compute.manager [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Received event network-changed-878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 707.176780] env[68798]: DEBUG nova.compute.manager [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Refreshing instance network info cache due to event network-changed-878018ef-f45b-4972-bfce-617a6e56a46d. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 707.176994] env[68798]: DEBUG oslo_concurrency.lockutils [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] Acquiring lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.177153] env[68798]: DEBUG oslo_concurrency.lockutils [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] Acquired lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.177316] env[68798]: DEBUG nova.network.neutron [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Refreshing network info cache for port 878018ef-f45b-4972-bfce-617a6e56a46d {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 707.515107] env[68798]: DEBUG nova.network.neutron [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Updated VIF entry in instance network info cache for port 878018ef-f45b-4972-bfce-617a6e56a46d. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 707.515478] env[68798]: DEBUG nova.network.neutron [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Updating instance_info_cache with network_info: [{"id": "878018ef-f45b-4972-bfce-617a6e56a46d", "address": "fa:16:3e:0c:b5:84", "network": {"id": "12716be4-cab4-4ea9-8845-46c27236beb5", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1751665085-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a0888c5e1b4a48b79726278612a5e53d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap878018ef-f4", "ovs_interfaceid": "878018ef-f45b-4972-bfce-617a6e56a46d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.527719] env[68798]: DEBUG oslo_concurrency.lockutils [req-8bf18a31-584f-4d05-91c0-990cae8b2075 req-5e183680-1a74-4a03-a91f-d585a6410374 service nova] Releasing lock "refresh_cache-cdb141da-a05c-4891-a33d-6e12eafe4f22" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 736.286692] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.287078] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.287149] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 736.287275] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 736.309098] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309267] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 748df295-743b-41be-b873-523b688f2c78] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309401] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309531] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309652] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309774] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.309895] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.310027] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.310153] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.310273] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 736.310395] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 736.310912] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.311161] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.311328] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 736.311584] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.048440] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 737.048629] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 738.048284] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.048628] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 738.060908] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.061198] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.061377] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.061534] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 
None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 738.062683] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23dab41-9b19-4b91-90d0-7bdf40c9d843 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.072123] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76625bbd-83a1-413e-94ad-0d32d07e4c74 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.087937] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa78d88a-6e5f-4132-9453-733c2866a2d9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.095291] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339317a4-3f70-4b3a-b91c-a4e735538e41 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.124588] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180730MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 738.124787] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.124951] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.201825] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 4995301f-e3c3-4032-adf0-7cffa3497d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.201990] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 748df295-743b-41be-b873-523b688f2c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202140] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202266] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202388] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 90d9df19-2d93-4543-a650-4a624f505d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202506] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202623] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202747] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202862] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.202975] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 738.214810] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.225721] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.235925] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2fc5f330-2a23-4a67-a49c-c4985928417b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.246807] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9d533b44-6afa-4c5b-a0a6-90ff442f7771 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.257888] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ad71bb4b-829d-4297-857f-249e4c499623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.271422] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9778caa6-4d64-483e-9b72-e82c9977f9ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.281883] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6a79f513-a8f0-4ceb-b4fd-466474dd67c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.293072] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ced820f2-8c09-4000-8f46-e0c5909e5b2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.303783] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9c094711-2653-4a44-a4b0-020621beb4fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.314244] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 092cc50b-edcc-4d9b-a981-a06e2eaaf321 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.325056] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.335138] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e1925875-04db-4b88-ad54-d33ff804d365 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.346419] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e3dfc177-6f11-48e4-bbac-83bda39fbb8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.357493] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ceb21573-1cb0-4af6-9897-f164997b4b5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.368235] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 10442eb5-8f1a-4b4c-9aab-78605de8dfea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.379104] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0d4e5325-aa25-4766-a490-9719b3f354c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.391697] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0913eee8-aa9a-4e1b-8aec-48d8a9197530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.401870] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c59a14e2-3655-4177-961a-34552be1ccb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.411853] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf752ef-a49e-477f-8297-59621e69e712 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.421870] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2afc987a-45cc-484e-a6f0-a0118e2e73eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.431701] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.442203] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 95cffda2-8119-4401-ac53-9d7042ef7180 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.451684] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1752a71d-8cc7-4f88-b097-53094df226e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.461850] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 738.462107] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 738.462262] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 738.897715] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb15196-c8ef-46a4-a6a2-6ec285268a50 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.905676] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ac0d28-c008-4a04-bfff-d6e98fd43b0b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.936307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845eba31-899a-413e-8506-0c6a63be6662 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.945227] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e0efec-5209-481f-8bf0-f2cb0b5e2144 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.959628] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.969042] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 738.988650] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 738.988836] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.864s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.537380] env[68798]: WARNING oslo_vmware.rw_handles [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 747.537380] env[68798]: ERROR oslo_vmware.rw_handles [ 747.537950] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 747.539361] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 747.539624] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/1d028898-a625-45b5-b0d9-c1ea98503118/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 747.539997] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-426288ee-3cbe-4ba3-a097-0812945d970b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.549970] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 747.549970] env[68798]: value = "task-4217560" [ 747.549970] env[68798]: _type = "Task" [ 747.549970] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.558469] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.060783] env[68798]: DEBUG oslo_vmware.exceptions [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 748.061160] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.061811] env[68798]: ERROR nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.061811] env[68798]: Faults: ['InvalidArgument'] [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Traceback (most recent call last): [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] yield resources [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self.driver.spawn(context, instance, image_meta, [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.061811] env[68798]: ERROR 
nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self._fetch_image_if_missing(context, vi) [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 748.061811] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] image_cache(vi, tmp_image_ds_loc) [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] vm_util.copy_virtual_disk( [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] session._wait_for_task(vmdk_copy_task) [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return self.wait_for_task(task_ref) [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return evt.wait() [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] result = hub.switch() [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return self.greenlet.switch() [ 748.062215] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self.f(*self.args, **self.kw) [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] raise exceptions.translate_fault(task_info.error) [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Faults: ['InvalidArgument'] [ 748.062557] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] [ 748.062557] env[68798]: INFO nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Terminating instance [ 748.063809] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.064025] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.064696] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 748.064890] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 748.065136] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2984ca3a-c425-4764-8adb-ecf745df9d08 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.067727] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbf14e5-b7a3-4e9d-a9c3-8fba50b0c46f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.074939] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 748.075210] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-733069fa-1f8b-4d0a-901f-46adc539c1d8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.077766] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.077943] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 748.078946] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa252cc-4392-4b38-85df-182fa5a07c5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.084876] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Waiting for the task: (returnval){ [ 748.084876] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5251c5c8-c009-2df2-780b-0ac5b63a7b29" [ 748.084876] env[68798]: _type = "Task" [ 748.084876] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.098482] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5251c5c8-c009-2df2-780b-0ac5b63a7b29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.159517] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 748.159517] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 748.159657] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleting the datastore file [datastore1] 4995301f-e3c3-4032-adf0-7cffa3497d3e {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.159979] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7486fb2d-4242-4e24-8094-a8824d514137 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.166642] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 748.166642] env[68798]: value = "task-4217562" [ 748.166642] env[68798]: _type = "Task" [ 748.166642] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.175093] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.595334] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 748.595640] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Creating directory with path [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.595875] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1283c2d0-41ce-423e-af4b-12ba650b5265 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.609350] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Created directory with path [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.609701] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Fetch image to [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 748.610014] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 748.611238] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3856d499-a688-491f-b4dc-575ee5470929 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.621841] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c53f6f-164f-4058-9966-a6549c83c906 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.633194] env[68798]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae677347-5048-4194-81d5-086f9215b8a6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.668058] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab03b89d-8026-493f-b195-db9019e33378 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.679632] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c48c4de3-17cd-449d-b6a6-b91c19f70cb9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.681572] env[68798]: DEBUG oslo_vmware.api [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077348} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.681810] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.682029] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 748.682220] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 748.682392] env[68798]: INFO nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 748.684971] env[68798]: DEBUG nova.compute.claims [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 748.685155] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.685364] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.713996] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 748.770179] env[68798]: DEBUG oslo_vmware.rw_handles [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 748.828861] env[68798]: DEBUG oslo_vmware.rw_handles [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 748.829064] env[68798]: DEBUG oslo_vmware.rw_handles [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 749.190178] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c516d785-716a-42c8-88a6-a88cdb45417b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.198250] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b219bb1-966c-4b30-8d92-63c3346820cb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.228160] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bd3e79-b551-4252-932e-2a7a278e3302 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.236259] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b9e431-4296-4e72-a38f-ce9309e038e2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.251277] env[68798]: DEBUG nova.compute.provider_tree [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.259649] env[68798]: DEBUG nova.scheduler.client.report [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.273574] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.588s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.274134] env[68798]: ERROR nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 749.274134] env[68798]: Faults: ['InvalidArgument'] [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Traceback (most recent call last): [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] 
self.driver.spawn(context, instance, image_meta, [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self._fetch_image_if_missing(context, vi) [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] image_cache(vi, tmp_image_ds_loc) [ 749.274134] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] vm_util.copy_virtual_disk( [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] session._wait_for_task(vmdk_copy_task) [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return self.wait_for_task(task_ref) [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return evt.wait() [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] result = hub.switch() [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] return self.greenlet.switch() [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 749.274470] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] self.f(*self.args, **self.kw) [ 749.274746] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 749.274746] 
env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] raise exceptions.translate_fault(task_info.error) [ 749.274746] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 749.274746] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Faults: ['InvalidArgument'] [ 749.274746] env[68798]: ERROR nova.compute.manager [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] [ 749.275095] env[68798]: DEBUG nova.compute.utils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 749.276266] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Build of instance 4995301f-e3c3-4032-adf0-7cffa3497d3e was re-scheduled: A specified parameter was not correct: fileType [ 749.276266] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 749.276636] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 749.276806] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 749.276973] env[68798]: DEBUG nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 749.277151] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.671010] env[68798]: DEBUG nova.network.neutron [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.685298] env[68798]: INFO nova.compute.manager [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 4995301f-e3c3-4032-adf0-7cffa3497d3e] Took 0.41 seconds to deallocate network for instance. 
[ 749.808840] env[68798]: INFO nova.scheduler.client.report [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted allocations for instance 4995301f-e3c3-4032-adf0-7cffa3497d3e [ 749.838502] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ce0de006-6c38-4778-837a-99b9d302d691 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "4995301f-e3c3-4032-adf0-7cffa3497d3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 145.417s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.859190] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 749.915651] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.915651] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.917022] env[68798]: INFO nova.compute.claims [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.400035] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfaf04fe-14dc-4055-a5a3-f83061694e60 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.409075] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c79d0ff-94de-4626-ac3d-5efb1241ad55 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.441088] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3c89c7-a5f4-4969-a9a6-0e5286c2e14b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.448906] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e042e5f-77df-4c84-97db-ff52a71b7e28 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.462621] env[68798]: DEBUG nova.compute.provider_tree [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 
tempest-ServerRescueNegativeTestJSON-920146277-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.471125] env[68798]: DEBUG nova.scheduler.client.report [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 750.487965] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.573s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.488656] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 750.527816] env[68798]: DEBUG nova.compute.utils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 750.529173] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 750.529347] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 750.544030] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 750.618892] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 750.626310] env[68798]: DEBUG nova.policy [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8189c68fc6cc4ad0a41a4b86683e4320', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d682f8d5d43405894ddd7c255618e2a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 750.646944] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 750.647225] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 750.647396] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.647579] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 750.647725] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 750.647909] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 750.648272] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 750.648434] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 750.648633] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 750.648808] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 750.648983] env[68798]: DEBUG nova.virt.hardware [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 750.649846] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4ec8ae-8b6a-4de8-8fc6-1f3ed787586e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.658859] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92cde83-e351-4dc1-b05b-b0ef7644ec8c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.089062] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Successfully created port: 2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.428804] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e848c3f4-64ff-4956-88e0-afa27be73068" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.429207] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.211848] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Successfully updated port: 2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 752.228528] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.228747] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquired lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.228968] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 752.268884] env[68798]: DEBUG nova.compute.manager [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Received event network-vif-plugged-2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 752.269180] env[68798]: DEBUG oslo_concurrency.lockutils [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] Acquiring lock "551707b9-118e-45c8-a28f-e70486272f6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.269369] env[68798]: DEBUG oslo_concurrency.lockutils [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] Lock "551707b9-118e-45c8-a28f-e70486272f6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.269497] env[68798]: DEBUG oslo_concurrency.lockutils [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] Lock "551707b9-118e-45c8-a28f-e70486272f6e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.269687] env[68798]: DEBUG nova.compute.manager [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] No waiting events found dispatching network-vif-plugged-2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 752.269851] env[68798]: WARNING nova.compute.manager [req-8d545d70-7d7f-4b94-85e6-3a255740a7de req-491496c3-2909-4a4e-9d82-e33cf9b014bd service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Received unexpected event network-vif-plugged-2e4ca566-a910-48b0-86db-871dcebcfc0d for instance with vm_state building and task_state spawning. [ 752.289471] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.554875] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Updating instance_info_cache with network_info: [{"id": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "address": "fa:16:3e:0d:5d:a8", "network": {"id": "52893821-b85a-43a0-8a7b-6b2703118846", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1659053968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d682f8d5d43405894ddd7c255618e2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4ca566-a9", "ovs_interfaceid": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.566476] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Releasing lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.566807] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance network_info: 
|[{"id": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "address": "fa:16:3e:0d:5d:a8", "network": {"id": "52893821-b85a-43a0-8a7b-6b2703118846", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1659053968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d682f8d5d43405894ddd7c255618e2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4ca566-a9", "ovs_interfaceid": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.567277] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:5d:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5fb99c57-eaa0-447b-bb33-baced85d9c00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2e4ca566-a910-48b0-86db-871dcebcfc0d', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 752.575158] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Creating folder: Project (0d682f8d5d43405894ddd7c255618e2a). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 752.575704] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df0b0885-4126-4eea-9fc0-edafa34bc9fd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.586615] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Created folder: Project (0d682f8d5d43405894ddd7c255618e2a) in parent group-v834492. [ 752.586615] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Creating folder: Instances. Parent ref: group-v834533. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 752.587243] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e962b25-181f-4786-92ea-46d7ed7ad69f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.596839] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Created folder: Instances in parent group-v834533. [ 752.597101] env[68798]: DEBUG oslo.service.loopingcall [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.597301] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 752.597518] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c76db1e3-a108-4644-882e-fbe8931d1ffa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.620022] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 752.620022] env[68798]: value = "task-4217565" [ 752.620022] env[68798]: _type = "Task" [ 752.620022] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.626424] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217565, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.128709] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217565, 'name': CreateVM_Task, 'duration_secs': 0.354111} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.128891] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 753.129581] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.129749] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.130182] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 753.130448] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-419d0fe6-7efa-43a6-bbc3-3027738a6c47 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.136027] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for the task: (returnval){ [ 753.136027] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52131f4b-b6e2-ff64-268e-eaa678d28ad1" [ 753.136027] env[68798]: _type = "Task" [ 753.136027] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.149916] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52131f4b-b6e2-ff64-268e-eaa678d28ad1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.648150] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.648413] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 753.648641] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.327795] env[68798]: DEBUG nova.compute.manager [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Received event network-changed-2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 754.328262] env[68798]: DEBUG nova.compute.manager [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Refreshing instance network info cache due to event network-changed-2e4ca566-a910-48b0-86db-871dcebcfc0d. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 754.328262] env[68798]: DEBUG oslo_concurrency.lockutils [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] Acquiring lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.328899] env[68798]: DEBUG oslo_concurrency.lockutils [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] Acquired lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.328899] env[68798]: DEBUG nova.network.neutron [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Refreshing network info cache for port 2e4ca566-a910-48b0-86db-871dcebcfc0d {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 754.685674] env[68798]: DEBUG nova.network.neutron [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Updated VIF entry in instance network info cache for port 2e4ca566-a910-48b0-86db-871dcebcfc0d. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 754.686071] env[68798]: DEBUG nova.network.neutron [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Updating instance_info_cache with network_info: [{"id": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "address": "fa:16:3e:0d:5d:a8", "network": {"id": "52893821-b85a-43a0-8a7b-6b2703118846", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1659053968-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d682f8d5d43405894ddd7c255618e2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5fb99c57-eaa0-447b-bb33-baced85d9c00", "external-id": "nsx-vlan-transportzone-253", "segmentation_id": 253, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e4ca566-a9", "ovs_interfaceid": "2e4ca566-a910-48b0-86db-871dcebcfc0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.696786] env[68798]: DEBUG oslo_concurrency.lockutils [req-0d1fafa9-81ac-4ab9-bbaa-825b146adcff req-4ff44a19-5808-452a-9e92-1c73bd89f58c service nova] Releasing lock "refresh_cache-551707b9-118e-45c8-a28f-e70486272f6e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.990625] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.043690] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.048475] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.048662] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 796.653959] env[68798]: WARNING oslo_vmware.rw_handles [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most 
recent call last): [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 796.653959] env[68798]: ERROR oslo_vmware.rw_handles [ 796.654462] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 796.656617] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 796.656921] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Copying Virtual Disk [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/fd158615-c304-4440-ac07-6344ee05e9c1/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 796.657303] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55407f10-f983-4aff-af23-61fd8370b223 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.666060] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Waiting for the task: (returnval){ [ 796.666060] env[68798]: value = "task-4217566" [ 796.666060] env[68798]: _type = "Task" [ 796.666060] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.048423] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.048756] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 797.048756] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 797.070365] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 748df295-743b-41be-b873-523b688f2c78] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.070532] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.070644] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.070773] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.070899] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071035] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071161] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071281] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071402] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071523] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 797.071640] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 797.072174] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.072350] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.072537] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 797.177957] env[68798]: DEBUG oslo_vmware.exceptions [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 797.178317] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.178932] env[68798]: ERROR nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.178932] env[68798]: Faults: ['InvalidArgument'] [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] Traceback (most recent call last): [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] yield resources [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self.driver.spawn(context, instance, image_meta, [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self._fetch_image_if_missing(context, vi) [ 797.178932] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] image_cache(vi, tmp_image_ds_loc) [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] vm_util.copy_virtual_disk( [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] session._wait_for_task(vmdk_copy_task) [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return self.wait_for_task(task_ref) [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return evt.wait() [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] result = hub.switch() [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 797.179315] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return self.greenlet.switch() [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self.f(*self.args, **self.kw) [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] raise exceptions.translate_fault(task_info.error) [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] Faults: ['InvalidArgument'] [ 797.179757] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] [ 797.179757] env[68798]: INFO nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Terminating instance [ 797.180942] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.181172] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.181412] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb3f95e-edce-405a-b510-55fbf94f1c8b {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.183934] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 797.184124] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 797.184885] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca325123-f5be-4be6-ab22-2e027cb62c39 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.192244] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 797.192500] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19b992a7-e242-45a7-b3b3-788304654870 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.195058] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.195251] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 797.196240] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e74c8c6b-d565-400d-8b89-557503af5702 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.202499] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for the task: (returnval){ [ 797.202499] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]521d3d73-2119-13a1-b4c8-0ece4864aa74" [ 797.202499] env[68798]: _type = "Task" [ 797.202499] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.210992] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]521d3d73-2119-13a1-b4c8-0ece4864aa74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.272034] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 797.272034] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 797.272251] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Deleting the datastore file [datastore1] 748df295-743b-41be-b873-523b688f2c78 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.272480] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a105579-ac6f-4325-8f7d-e6d09184afec {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.280033] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Waiting for the task: (returnval){ [ 797.280033] env[68798]: value = "task-4217568" [ 797.280033] env[68798]: _type = "Task" [ 797.280033] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.289423] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Task: {'id': task-4217568, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.713533] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 797.713796] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Creating directory with path [datastore1] vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.714111] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62dcca23-d906-48b2-9420-8eb353d1a998 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.728915] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Created directory with path [datastore1] vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.728915] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Fetch image to [datastore1] vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 797.728915] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 797.729745] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe51627-2c9b-4326-9592-9b204ffdcdd0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.738715] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed612c7d-f4d1-4cad-aa4a-12ba072cd70e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.749379] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc2767a-8e52-438b-9d21-72001ebf4fa3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.781130] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50a0db9-e42c-4e05-b45c-8ffdc346ba7b {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.798187] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d2ed6cee-7dc9-41c6-98b7-f8f178443465 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.800097] env[68798]: DEBUG oslo_vmware.api [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Task: {'id': task-4217568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082231} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.800341] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.800521] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 797.800733] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 797.800856] env[68798]: INFO nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Took 0.62 seconds to destroy the instance on the hypervisor. 
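Note: the entries just above and below trace an image transfer to the datastore over HTTPS: a generic service ticket is acquired via SessionManager.AcquireGenericServiceTicket, oslo_vmware.rw_handles opens a write connection to the datastore's /folder URL, streams the image bytes (21318656 in the entry that follows), and closes the handle; the earlier RemoteDisconnected warning in this log shows the host can drop the connection without returning a response. The following is only a minimal illustrative sketch of that transfer pattern, not the oslo.vmware rw_handles code path: it assumes a `requests` session, a placeholder URL, and a hypothetical ticket cookie name.

import requests

# Placeholder for the datastore file URL pattern seen in the log
# (https://<esx-host>:443/folder/<path>?dcPath=ha-datacenter&dsName=datastore1).
DATASTORE_FILE_URL = (
    "https://esx-host.example:443/folder/vmware_temp/<tmp-dir>/<image-id>/tmp-sparse.vmdk"
    "?dcPath=ha-datacenter&dsName=datastore1"
)

def upload_image(stream, ticket):
    """Stream image bytes to the datastore /folder endpoint.

    `ticket` stands in for the value returned by
    SessionManager.AcquireGenericServiceTicket in the log; the cookie name
    below is a guess and only for illustration.
    """
    headers = {
        "Content-Type": "application/octet-stream",
        "Cookie": "vmware_cgi_ticket=%s" % ticket,  # hypothetical cookie name
    }
    # verify=False mirrors a lab setup with self-signed ESX certificates.
    resp = requests.put(DATASTORE_FILE_URL, data=stream, headers=headers,
                        verify=False, timeout=300)
    # The RemoteDisconnected warning earlier in this log shows the host may
    # close the connection without a response once the last byte lands, so a
    # robust caller re-checks the file on the datastore rather than trusting
    # the status code alone.
    return resp.status_code
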
[ 797.803087] env[68798]: DEBUG nova.compute.claims [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 797.803270] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.803499] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.823855] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 797.880577] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 797.941582] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 797.941826] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 798.049122] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.075374] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.332861] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f827459-b428-4cc5-8c5e-fe549e5dd670 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.342747] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb4e94b-7bf3-4840-88fc-3dc0313f3b5f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.373283] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de62f201-66bf-44b0-964f-d3f352c9ba31 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.381298] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cd4175-9dfa-47f3-a9ec-eed49eb0f340 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.396126] env[68798]: DEBUG nova.compute.provider_tree [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.404361] env[68798]: DEBUG nova.scheduler.client.report [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.420036] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.616s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.420376] env[68798]: ERROR nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 
tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 798.420376] env[68798]: Faults: ['InvalidArgument'] [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] Traceback (most recent call last): [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self.driver.spawn(context, instance, image_meta, [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self._vmops.spawn(context, instance, image_meta, injected_files, [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self._fetch_image_if_missing(context, vi) [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] image_cache(vi, tmp_image_ds_loc) [ 798.420376] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] vm_util.copy_virtual_disk( [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] session._wait_for_task(vmdk_copy_task) [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return self.wait_for_task(task_ref) [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return evt.wait() [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] result = hub.switch() [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", 
line 310, in switch [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] return self.greenlet.switch() [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 798.420744] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] self.f(*self.args, **self.kw) [ 798.421070] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 798.421070] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] raise exceptions.translate_fault(task_info.error) [ 798.421070] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 798.421070] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] Faults: ['InvalidArgument'] [ 798.421070] env[68798]: ERROR nova.compute.manager [instance: 748df295-743b-41be-b873-523b688f2c78] [ 798.421206] env[68798]: DEBUG nova.compute.utils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 798.422614] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Build of instance 748df295-743b-41be-b873-523b688f2c78 was re-scheduled: A specified parameter was not correct: fileType [ 798.422614] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 798.422985] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 798.423177] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 798.423332] env[68798]: DEBUG nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 798.423491] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 798.827284] env[68798]: DEBUG nova.network.neutron [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.844080] env[68798]: INFO nova.compute.manager [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] [instance: 748df295-743b-41be-b873-523b688f2c78] Took 0.42 seconds to deallocate network for instance. [ 798.959477] env[68798]: INFO nova.scheduler.client.report [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Deleted allocations for instance 748df295-743b-41be-b873-523b688f2c78 [ 798.987984] env[68798]: DEBUG oslo_concurrency.lockutils [None req-88cfe3a2-9590-4d87-b4cc-6f9ab3c2e1e4 tempest-ServerDiagnosticsNegativeTest-83400979 tempest-ServerDiagnosticsNegativeTest-83400979-project-member] Lock "748df295-743b-41be-b873-523b688f2c78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 194.211s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.001226] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 799.048705] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.061040] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.061311] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.061369] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.061505] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 799.062775] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.063010] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.064484] env[68798]: INFO nova.compute.claims [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.068165] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455066e1-7c97-4b68-87d8-4d0f7802c161 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.077877] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d5c79f-092f-4e0f-938b-f7f3cb2658fc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.099073] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cad9de-eb34-4ea9-bfd4-e422515071d1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.105765] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10264fa5-b3c6-4b31-b84d-2fc1690e289e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.141888] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180731MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 799.142108] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.579989] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ed356a-34e3-4e0e-847a-384500712a4b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.588688] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a3750d1-f78b-43c8-9601-ed093bfeccdf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.619948] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adcebd7-e9ad-4451-8395-21aca1abb40a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.628427] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4499f02e-4c36-43c5-bd6b-af0638c47080 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.643157] env[68798]: DEBUG nova.compute.provider_tree [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.651594] env[68798]: DEBUG nova.scheduler.client.report [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.669417] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa 
tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.606s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.669928] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 799.672535] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.530s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.708670] env[68798]: DEBUG nova.compute.utils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 799.711030] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 799.711937] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 799.722309] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 799.764209] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.764410] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.764541] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 90d9df19-2d93-4543-a650-4a624f505d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765544] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765544] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765544] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765544] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765783] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765783] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.765783] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 799.778851] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2fc5f330-2a23-4a67-a49c-c4985928417b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.794926] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9d533b44-6afa-4c5b-a0a6-90ff442f7771 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.799259] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 799.806954] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ad71bb4b-829d-4297-857f-249e4c499623 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.818499] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9778caa6-4d64-483e-9b72-e82c9977f9ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.829553] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.829817] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.829979] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.830186] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.830338] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.830483] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.830686] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.831192] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
799.831192] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.831192] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.831529] env[68798]: DEBUG nova.virt.hardware [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.832973] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e3e4b0-942b-40aa-a754-8eb16050c51b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.837138] env[68798]: DEBUG nova.policy [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '33a1e17a7aa0483f804240a372cb3e7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc05c32293994f54afe1ee53b8ecab37', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.839757] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6a79f513-a8f0-4ceb-b4fd-466474dd67c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.852278] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a583be4-3a83-43d0-9117-f87ca9994b58 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.856453] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ced820f2-8c09-4000-8f46-e0c5909e5b2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.869271] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9c094711-2653-4a44-a4b0-020621beb4fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.879886] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 092cc50b-edcc-4d9b-a981-a06e2eaaf321 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.890719] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.901641] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e1925875-04db-4b88-ad54-d33ff804d365 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.913824] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e3dfc177-6f11-48e4-bbac-83bda39fbb8d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.925126] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ceb21573-1cb0-4af6-9897-f164997b4b5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.936296] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 10442eb5-8f1a-4b4c-9aab-78605de8dfea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.949411] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0d4e5325-aa25-4766-a490-9719b3f354c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.960599] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0913eee8-aa9a-4e1b-8aec-48d8a9197530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.974414] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c59a14e2-3655-4177-961a-34552be1ccb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 799.986574] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf752ef-a49e-477f-8297-59621e69e712 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.002635] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2afc987a-45cc-484e-a6f0-a0118e2e73eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.011584] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.025100] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 95cffda2-8119-4401-ac53-9d7042ef7180 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.037740] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1752a71d-8cc7-4f88-b097-53094df226e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.049561] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.067861] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 800.068215] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 800.068298] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 800.459976] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Successfully created port: bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.560316] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ec63d-a8dc-443c-9945-e4373fdd158a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.569485] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebac908-1f0d-4a92-b6c5-97fc8c9b129e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.606787] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b43f655-753a-4340-9809-13b666fed1f2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.613866] env[68798]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0cdd6e-91ae-43d9-b2b9-1fa7fc326801 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.629608] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.638051] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 800.659644] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 800.659977] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.987s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.422157] env[68798]: DEBUG nova.compute.manager [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Received event network-vif-plugged-bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 801.422157] env[68798]: DEBUG oslo_concurrency.lockutils [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] Acquiring lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.422157] env[68798]: DEBUG oslo_concurrency.lockutils [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.422157] env[68798]: DEBUG oslo_concurrency.lockutils [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.422511] env[68798]: DEBUG nova.compute.manager [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] 
[instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] No waiting events found dispatching network-vif-plugged-bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 801.422511] env[68798]: WARNING nova.compute.manager [req-c5df81f2-b2e9-4b85-9f90-d4da9834f821 req-9442d3b7-d866-43b3-ad04-3a9037dd4ec3 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Received unexpected event network-vif-plugged-bb009a78-ee8f-463a-b810-4880e76dc735 for instance with vm_state building and task_state spawning. [ 801.648020] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Successfully updated port: bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.662055] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.662055] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquired lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.662055] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.726238] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.034121] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Updating instance_info_cache with network_info: [{"id": "bb009a78-ee8f-463a-b810-4880e76dc735", "address": "fa:16:3e:1a:b2:27", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb009a78-ee", "ovs_interfaceid": "bb009a78-ee8f-463a-b810-4880e76dc735", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.051202] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Releasing lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.051202] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance network_info: |[{"id": "bb009a78-ee8f-463a-b810-4880e76dc735", "address": "fa:16:3e:1a:b2:27", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb009a78-ee", "ovs_interfaceid": "bb009a78-ee8f-463a-b810-4880e76dc735", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 802.051420] env[68798]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:b2:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb009a78-ee8f-463a-b810-4880e76dc735', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.058223] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Creating folder: Project (dc05c32293994f54afe1ee53b8ecab37). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 802.058983] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffbe772e-c9f5-462b-abb3-c92cccde135f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.071870] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Created folder: Project (dc05c32293994f54afe1ee53b8ecab37) in parent group-v834492. [ 802.071870] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Creating folder: Instances. Parent ref: group-v834536. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 802.071870] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7632df89-7332-41e2-9fc4-b0dd98c8e2f5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.080618] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Created folder: Instances in parent group-v834536. [ 802.081230] env[68798]: DEBUG oslo.service.loopingcall [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.081625] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 802.082144] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93130bdc-5827-432c-bb33-099fa807ef5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.112692] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.112692] env[68798]: value = "task-4217571" [ 802.112692] env[68798]: _type = "Task" [ 802.112692] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.123869] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217571, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.621314] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217571, 'name': CreateVM_Task, 'duration_secs': 0.345448} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.621650] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 802.622336] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.622502] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.622861] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 802.623195] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-055f5c2a-b6ad-48e8-8ce3-96362c25f321 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.629127] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for the task: (returnval){ [ 802.629127] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]527c84e3-83c6-ad2a-c4dd-b232453dd40e" [ 802.629127] env[68798]: _type = "Task" [ 802.629127] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.640029] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]527c84e3-83c6-ad2a-c4dd-b232453dd40e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.144453] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.144453] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.144453] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.777398] env[68798]: DEBUG nova.compute.manager [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Received event network-changed-bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 803.777670] env[68798]: DEBUG nova.compute.manager [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Refreshing instance network info cache due to event network-changed-bb009a78-ee8f-463a-b810-4880e76dc735. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 803.777816] env[68798]: DEBUG oslo_concurrency.lockutils [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] Acquiring lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.777956] env[68798]: DEBUG oslo_concurrency.lockutils [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] Acquired lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.778321] env[68798]: DEBUG nova.network.neutron [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Refreshing network info cache for port bb009a78-ee8f-463a-b810-4880e76dc735 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.348157] env[68798]: DEBUG nova.network.neutron [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Updated VIF entry in instance network info cache for port bb009a78-ee8f-463a-b810-4880e76dc735. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 804.348535] env[68798]: DEBUG nova.network.neutron [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Updating instance_info_cache with network_info: [{"id": "bb009a78-ee8f-463a-b810-4880e76dc735", "address": "fa:16:3e:1a:b2:27", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.64", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb009a78-ee", "ovs_interfaceid": "bb009a78-ee8f-463a-b810-4880e76dc735", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.360801] env[68798]: DEBUG oslo_concurrency.lockutils [req-bed4fe47-9d59-4d2d-ad2d-ebf722249594 req-ce23a2ef-173c-4bff-bedc-b92ed7cec969 service nova] Releasing lock "refresh_cache-89f660c8-6efd-4789-90ee-67e42abc1db7" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.583070] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.611253] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.611973] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.249427] env[68798]: DEBUG oslo_concurrency.lockutils [None req-822d4ae7-c419-400f-a7c2-3e2307d910da tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "5a4174a6-bf87-4107-8382-8c0f90253d45" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.249666] env[68798]: DEBUG oslo_concurrency.lockutils [None req-822d4ae7-c419-400f-a7c2-3e2307d910da tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "5a4174a6-bf87-4107-8382-8c0f90253d45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.711460] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.452643] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "90d9df19-2d93-4543-a650-4a624f505d5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.156233] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "620ef3f6-0444-474d-8179-3dc0143f2e99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.160097] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "e060aaea-7508-46ed-8786-b5753fde75e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.526487] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 836.617812] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.725907] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 
tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.542147] env[68798]: DEBUG oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "89f660c8-6efd-4789-90ee-67e42abc1db7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.414043] env[68798]: WARNING oslo_vmware.rw_handles [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 845.414043] env[68798]: ERROR oslo_vmware.rw_handles [ 845.414043] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 845.415064] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 845.415064] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Copying Virtual Disk [datastore1] vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] 
vmware_temp/72826798-4736-4776-a4c9-537fcd354696/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 845.415064] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5345fb8e-c790-4448-aa8a-73a14d9f7812 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.428084] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for the task: (returnval){ [ 845.428084] env[68798]: value = "task-4217572" [ 845.428084] env[68798]: _type = "Task" [ 845.428084] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.445253] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Task: {'id': task-4217572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.943096] env[68798]: DEBUG oslo_vmware.exceptions [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 845.943585] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.944289] env[68798]: ERROR nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 845.944289] env[68798]: Faults: ['InvalidArgument'] [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Traceback (most recent call last): [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] yield resources [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self.driver.spawn(context, instance, image_meta, [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self._fetch_image_if_missing(context, vi) [ 845.944289] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] image_cache(vi, tmp_image_ds_loc) [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] vm_util.copy_virtual_disk( [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] session._wait_for_task(vmdk_copy_task) [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return self.wait_for_task(task_ref) [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return evt.wait() [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] result = hub.switch() [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 845.944693] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return self.greenlet.switch() [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self.f(*self.args, **self.kw) [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] raise exceptions.translate_fault(task_info.error) [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 
845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Faults: ['InvalidArgument'] [ 845.945044] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] [ 845.945044] env[68798]: INFO nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Terminating instance [ 845.947233] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.948195] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.948358] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 845.948583] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 845.948862] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c8fc917-e2a5-4620-98aa-53b9daa8679c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.954008] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed3d609-69a9-42fb-a856-0c26009aeffe {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.962341] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 845.966035] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91e682c9-47fa-483d-8162-e3096375894e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.967239] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.967239] env[68798]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 845.969093] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64f8533e-ad6a-4005-99a0-ff3b79236da8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.977727] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for the task: (returnval){ [ 845.977727] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52fea8bd-a92f-915f-d4d9-32594e8a56dd" [ 845.977727] env[68798]: _type = "Task" [ 845.977727] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.996174] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52fea8bd-a92f-915f-d4d9-32594e8a56dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.050489] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 846.050980] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 846.050980] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Deleting the datastore file [datastore1] 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 846.051794] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3da9f9b-5887-4cb1-9a9b-977da4d782f1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.063739] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for the task: (returnval){ [ 846.063739] env[68798]: value = "task-4217574" [ 846.063739] env[68798]: _type = "Task" [ 846.063739] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.077664] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Task: {'id': task-4217574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.488858] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 846.489320] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Creating directory with path [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.489626] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60b1afd2-67bb-4eb3-92e6-1cfeab7df95d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.507641] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Created directory with path [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.507948] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Fetch image to [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 846.508203] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 846.508941] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a108242e-99f3-4e70-b3be-39a0175fd686 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.518043] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98607d4-dcee-46f1-af56-61f3e475a5a3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.530556] env[68798]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66f052e-26d4-407e-9231-abe0b7daa587 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.574432] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3661454-d92f-4dac-a986-18277d9dd2e4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.583763] env[68798]: DEBUG oslo_vmware.api [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Task: {'id': task-4217574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083579} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.584803] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 846.585508] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 846.585508] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 846.585508] env[68798]: INFO nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Took 0.64 seconds to destroy the instance on the hypervisor. 
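Editor's note: the MakeDirectory, SearchDatastore_Task and DeleteDatastoreFile_Task entries above, like the CopyVirtualDisk traceback before them, all follow the same oslo.vmware pattern: invoke a vSphere task method through the API session, then block in wait_for_task(), which polls the task (the "progress is 0%" DEBUG lines) and raises a translated fault such as the VimFaultException with Faults: ['InvalidArgument'] if the task errors out. A minimal sketch of that pattern is below; it assumes a reachable vCenter and illustrative credentials, and the datastore path is taken from the log purely as an example — this is not Nova's ds_util code.

```python
# Sketch of the invoke-then-wait pattern visible in these log entries.
# Assumes a reachable vCenter; host/user/password are placeholders.
from oslo_vmware import api
from oslo_vmware import exceptions as vexc
from oslo_vmware import vim_util

# Constructing the session logs into vCenter (the SessionManager.Login step).
session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Pick a datacenter moref via the PropertyCollector, as the driver does
# with its RetrievePropertiesEx calls.
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datacenter', 1)
dc_ref = result.objects[0].obj

file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore1] 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7',
    datacenter=dc_ref)

try:
    # wait_for_task() polls until the task finishes and raises a fault
    # translated by oslo.vmware if it fails, e.g. InvalidArgument above.
    session.wait_for_task(task)
except vexc.VimFaultException as err:
    print('datastore file delete failed: %s (faults: %s)'
          % (err, err.fault_list))
```

The driver-side code wraps the same calls behind its own helpers (ds_util.file_delete, vm_util.copy_virtual_disk), but the task lifecycle in the log is exactly this invoke/poll/translate sequence.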
[ 846.589077] env[68798]: DEBUG nova.compute.claims [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 846.589077] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.589077] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.591798] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a575704b-1be3-4d9c-b045-9e0ea520649c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.626787] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 846.710157] env[68798]: DEBUG oslo_vmware.rw_handles [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 846.783263] env[68798]: DEBUG oslo_vmware.rw_handles [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 846.783554] env[68798]: DEBUG oslo_vmware.rw_handles [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 847.213116] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0047ca-e0b8-47d6-b415-1d451b2c4ba1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.225563] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be350fdd-8af9-4a9d-b47d-0a9bc2c76d3e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.260751] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad94de88-f5c8-48a3-b9de-5fa8f710f6ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.269388] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b40aee-b4c8-4ce6-a4d5-02b2c9b89091 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.285192] env[68798]: DEBUG nova.compute.provider_tree [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.297262] env[68798]: DEBUG nova.scheduler.client.report [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 847.316049] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.728s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.316645] env[68798]: ERROR nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 847.316645] env[68798]: Faults: ['InvalidArgument'] [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Traceback (most recent call last): [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 
82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self.driver.spawn(context, instance, image_meta, [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self._fetch_image_if_missing(context, vi) [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] image_cache(vi, tmp_image_ds_loc) [ 847.316645] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] vm_util.copy_virtual_disk( [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] session._wait_for_task(vmdk_copy_task) [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return self.wait_for_task(task_ref) [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return evt.wait() [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] result = hub.switch() [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] return self.greenlet.switch() [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 847.317061] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] self.f(*self.args, **self.kw) [ 847.317424] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 847.317424] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] raise exceptions.translate_fault(task_info.error) [ 847.317424] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 847.317424] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Faults: ['InvalidArgument'] [ 847.317424] env[68798]: ERROR nova.compute.manager [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] [ 847.317642] env[68798]: DEBUG nova.compute.utils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 847.319256] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Build of instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 was re-scheduled: A specified parameter was not correct: fileType [ 847.319256] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 847.319775] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 847.319936] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 847.321654] env[68798]: DEBUG nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 847.321654] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 847.744523] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.744861] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.101707] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "551707b9-118e-45c8-a28f-e70486272f6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.156193] env[68798]: DEBUG nova.network.neutron [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.177194] env[68798]: INFO nova.compute.manager [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Took 0.86 seconds to deallocate network for instance. 
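Editor's note: the recurring "Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... released ... held N s" triples (around "compute_resources", the instance UUID, and "<uuid>-events") come from oslo.concurrency's lockutils; the timings are printed by its `inner` wrapper, which is the code path named in every one of those DEBUG lines. A minimal sketch of the same locking pattern follows; the lock names mirror the log, but the worker functions are illustrative and not Nova code.

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired / released" DEBUG lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs only while "compute_resources" is held; the decorator's inner
    # wrapper logs the waited/held durations seen above.
    print('aborting claim for %s' % instance_uuid)

def clear_events_for_instance(instance_uuid):
    # Same mechanism as a context manager, matching the "<uuid>-events" lock.
    with lockutils.lock('%s-events' % instance_uuid):
        print('events cleared for %s' % instance_uuid)

abort_instance_claim('82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7')
clear_events_for_instance('82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7')
```

By default these locks are in-process semaphores (not file locks), which is why a single nova-compute worker can hold "compute_resources" for hundreds of seconds while other requests queue behind it, as the "waited 43.798s" and "held 243.014s" figures below show.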
[ 848.347746] env[68798]: INFO nova.scheduler.client.report [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Deleted allocations for instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 [ 848.379625] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ede2907e-428f-486c-9dfc-27526c247439 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.014s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.381260] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 43.798s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.381512] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.381762] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.381909] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.387588] env[68798]: INFO nova.compute.manager [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Terminating instance [ 848.389265] env[68798]: DEBUG nova.compute.manager [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 848.389484] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 848.389998] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aab19125-ccd9-4a1d-b509-a198cfd4ee4a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.400553] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc009c58-353a-4fa9-8745-c7cb7354da5a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.411875] env[68798]: DEBUG nova.compute.manager [None req-29280c3d-cde0-46b4-afb5-64b459be9426 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: 2fc5f330-2a23-4a67-a49c-c4985928417b] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.433894] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7 could not be found. [ 848.434150] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 848.434299] env[68798]: INFO nova.compute.manager [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 848.437087] env[68798]: DEBUG oslo.service.loopingcall [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 848.437087] env[68798]: DEBUG nova.compute.manager [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 848.437087] env[68798]: DEBUG nova.network.neutron [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.456030] env[68798]: DEBUG nova.compute.manager [None req-29280c3d-cde0-46b4-afb5-64b459be9426 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: 2fc5f330-2a23-4a67-a49c-c4985928417b] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.482106] env[68798]: DEBUG oslo_concurrency.lockutils [None req-29280c3d-cde0-46b4-afb5-64b459be9426 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "2fc5f330-2a23-4a67-a49c-c4985928417b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.169s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.497269] env[68798]: DEBUG nova.network.neutron [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.502341] env[68798]: DEBUG nova.compute.manager [None req-2b880fb0-15c7-4635-b80f-2f0f02006412 tempest-VolumesAssistedSnapshotsTest-1167727799 tempest-VolumesAssistedSnapshotsTest-1167727799-project-member] [instance: 9d533b44-6afa-4c5b-a0a6-90ff442f7771] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.507045] env[68798]: INFO nova.compute.manager [-] [instance: 82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7] Took 0.07 seconds to deallocate network for instance. [ 848.531848] env[68798]: DEBUG nova.compute.manager [None req-2b880fb0-15c7-4635-b80f-2f0f02006412 tempest-VolumesAssistedSnapshotsTest-1167727799 tempest-VolumesAssistedSnapshotsTest-1167727799-project-member] [instance: 9d533b44-6afa-4c5b-a0a6-90ff442f7771] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.557423] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b880fb0-15c7-4635-b80f-2f0f02006412 tempest-VolumesAssistedSnapshotsTest-1167727799 tempest-VolumesAssistedSnapshotsTest-1167727799-project-member] Lock "9d533b44-6afa-4c5b-a0a6-90ff442f7771" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.231s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.584753] env[68798]: DEBUG nova.compute.manager [None req-f2df0b5d-ddcf-4bd9-bfef-00516721b11d tempest-ServersWithSpecificFlavorTestJSON-1621209597 tempest-ServersWithSpecificFlavorTestJSON-1621209597-project-member] [instance: ad71bb4b-829d-4297-857f-249e4c499623] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.615769] env[68798]: DEBUG nova.compute.manager [None req-f2df0b5d-ddcf-4bd9-bfef-00516721b11d tempest-ServersWithSpecificFlavorTestJSON-1621209597 tempest-ServersWithSpecificFlavorTestJSON-1621209597-project-member] [instance: ad71bb4b-829d-4297-857f-249e4c499623] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.639503] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f2df0b5d-ddcf-4bd9-bfef-00516721b11d tempest-ServersWithSpecificFlavorTestJSON-1621209597 tempest-ServersWithSpecificFlavorTestJSON-1621209597-project-member] Lock "ad71bb4b-829d-4297-857f-249e4c499623" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.392s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.643609] env[68798]: DEBUG oslo_concurrency.lockutils [None req-534543cd-c597-4e73-87db-29d435af23b6 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "82cf03c6-4fbf-4fc3-ba6b-ecb2df73efd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.262s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.648900] env[68798]: DEBUG nova.compute.manager [None req-924772f1-c2a1-404b-ba6e-fe6ee1238c10 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669-project-member] [instance: 9778caa6-4d64-483e-9b72-e82c9977f9ce] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.673737] env[68798]: DEBUG nova.compute.manager [None req-924772f1-c2a1-404b-ba6e-fe6ee1238c10 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669-project-member] [instance: 9778caa6-4d64-483e-9b72-e82c9977f9ce] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.696798] env[68798]: DEBUG oslo_concurrency.lockutils [None req-924772f1-c2a1-404b-ba6e-fe6ee1238c10 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669 tempest-FloatingIPsAssociationNegativeTestJSON-1003925669-project-member] Lock "9778caa6-4d64-483e-9b72-e82c9977f9ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.813s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.712246] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: 6a79f513-a8f0-4ceb-b4fd-466474dd67c8] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.745226] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: 6a79f513-a8f0-4ceb-b4fd-466474dd67c8] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.769317] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "6a79f513-a8f0-4ceb-b4fd-466474dd67c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.215s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.779751] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: ced820f2-8c09-4000-8f46-e0c5909e5b2e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.807066] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: ced820f2-8c09-4000-8f46-e0c5909e5b2e] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.830112] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "ced820f2-8c09-4000-8f46-e0c5909e5b2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.241s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.840862] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: 9c094711-2653-4a44-a4b0-020621beb4fe] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.866022] env[68798]: DEBUG nova.compute.manager [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] [instance: 9c094711-2653-4a44-a4b0-020621beb4fe] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.892867] env[68798]: DEBUG oslo_concurrency.lockutils [None req-949cd8ba-80d4-4bf1-ba1d-fad9038a1c01 tempest-ListServersNegativeTestJSON-997523842 tempest-ListServersNegativeTestJSON-997523842-project-member] Lock "9c094711-2653-4a44-a4b0-020621beb4fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.254s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.903811] env[68798]: DEBUG nova.compute.manager [None req-c96d0001-2c58-4a6e-ad48-e080b6f1e5b8 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 092cc50b-edcc-4d9b-a981-a06e2eaaf321] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 848.930282] env[68798]: DEBUG nova.compute.manager [None req-c96d0001-2c58-4a6e-ad48-e080b6f1e5b8 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 092cc50b-edcc-4d9b-a981-a06e2eaaf321] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 848.956277] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c96d0001-2c58-4a6e-ad48-e080b6f1e5b8 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "092cc50b-edcc-4d9b-a981-a06e2eaaf321" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.012s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.972199] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 849.056935] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.057892] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.058788] env[68798]: INFO nova.compute.claims [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.615724] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0eb82f-e1d7-4f1f-8bad-364d16410d9f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.625219] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14adfa8-c2f1-4515-9faf-cd7ac659bc7d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.656940] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a151c142-164b-4088-a31d-8695e93e34f0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.665156] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba70aab4-54f0-4a66-a321-72f9160f6cb2 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.680459] env[68798]: DEBUG nova.compute.provider_tree [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 849.695065] env[68798]: DEBUG nova.scheduler.client.report [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.713104] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.656s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.713631] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 849.758072] env[68798]: DEBUG nova.compute.utils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.763206] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 849.763206] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 849.771505] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 849.844424] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 849.872042] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 849.872319] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 849.872480] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.872674] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 849.872848] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.873008] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 849.873324] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 849.873536] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 849.873755] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 849.873968] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 849.874208] env[68798]: DEBUG nova.virt.hardware [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 849.875162] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c25ab7-ce71-4da0-a6ad-ab345cd33def {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.885114] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57672d35-d20b-47a5-b873-e6afadf11e49 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.084112] env[68798]: DEBUG nova.policy [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15198ec168fd4281a9912aab1896c36a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0adc61993bc741e6abd7779bdd56e719', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 850.680591] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Successfully created port: 90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.319344] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.548792] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Successfully updated port: 90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.561652] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.561652] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquired lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.561652] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.661805] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.906594] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updating instance_info_cache with network_info: [{"id": "90b8100a-f45a-40f4-917f-57d033dc4b73", "address": "fa:16:3e:67:b1:6d", "network": {"id": "4d532f39-e224-42c8-b1b3-623832c95222", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1700754151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0adc61993bc741e6abd7779bdd56e719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b8100a-f4", "ovs_interfaceid": "90b8100a-f45a-40f4-917f-57d033dc4b73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.920390] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Releasing lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.920832] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance network_info: |[{"id": "90b8100a-f45a-40f4-917f-57d033dc4b73", "address": "fa:16:3e:67:b1:6d", "network": {"id": "4d532f39-e224-42c8-b1b3-623832c95222", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1700754151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0adc61993bc741e6abd7779bdd56e719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b8100a-f4", "ovs_interfaceid": "90b8100a-f45a-40f4-917f-57d033dc4b73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 851.922063] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:b1:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0636c3f6-fcb7-4954-ab07-c5cd0dee37b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90b8100a-f45a-40f4-917f-57d033dc4b73', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.929582] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Creating folder: Project (0adc61993bc741e6abd7779bdd56e719). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 851.930218] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a0d9aeb-d19f-4f8f-8f05-2ce6a0c416bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.942546] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Created folder: Project (0adc61993bc741e6abd7779bdd56e719) in parent group-v834492. [ 851.942673] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Creating folder: Instances. Parent ref: group-v834539. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 851.943248] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11a2c75f-ae8e-413b-8a90-8cbc0a28aee4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.953141] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Created folder: Instances in parent group-v834539. [ 851.953548] env[68798]: DEBUG oslo.service.loopingcall [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.954162] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 851.954495] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce8c79cb-760c-4e08-9180-d5b0e62b1168 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.979229] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.979229] env[68798]: value = "task-4217577" [ 851.979229] env[68798]: _type = "Task" [ 851.979229] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.988218] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217577, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.194629] env[68798]: DEBUG nova.compute.manager [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Received event network-vif-plugged-90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 852.194948] env[68798]: DEBUG oslo_concurrency.lockutils [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] Acquiring lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.195336] env[68798]: DEBUG oslo_concurrency.lockutils [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.195835] env[68798]: DEBUG oslo_concurrency.lockutils [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.196174] env[68798]: DEBUG nova.compute.manager [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] No waiting events found dispatching network-vif-plugged-90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 852.196363] env[68798]: WARNING nova.compute.manager [req-52fa4968-88ab-4c8e-92a1-c123085d2b78 req-85c9d46b-aa3d-4f90-8b60-285563f7bee1 service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Received unexpected event network-vif-plugged-90b8100a-f45a-40f4-917f-57d033dc4b73 for instance with vm_state building and task_state deleting. 
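The entries above show the CreateVM_Task flow for instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14: Folder.CreateVM_Task is invoked, oslo.vmware then waits on task-4217577 and polls it at 0% progress, and the task is reported as completed successfully just below (duration_secs 0.332103). As a rough illustration of that poll-until-terminal-state pattern, here is a minimal sketch; it is not the oslo.vmware implementation, and get_task_info, POLL_INTERVAL, the timeout default and the dict keys are assumed names used only for illustration.

# Minimal sketch of the poll-until-complete pattern visible for task-4217577.
# NOT the oslo.vmware API: get_task_info() is a hypothetical accessor standing
# in for reading a vCenter task's 'info' property via the PropertyCollector.
import time

POLL_INTERVAL = 0.5  # assumed interval between polls, in seconds

def wait_for_task(get_task_info, task_ref, timeout=300):
    """Poll a vCenter-style task until it succeeds, errors, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            return info.get('result')    # a CreateVM_Task would yield the new VM reference
        if info['state'] == 'error':
            raise RuntimeError('Task %s failed: %s' % (task_ref, info.get('error')))
        # still 'queued' or 'running': report progress and poll again,
        # matching the "progress is 0%" lines in the log
        print('Task %s progress is %s%%' % (task_ref, info.get('progress', 0)))
        time.sleep(POLL_INTERVAL)
    raise TimeoutError('Task %s did not complete within %ss' % (task_ref, timeout))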
[ 852.495023] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217577, 'name': CreateVM_Task, 'duration_secs': 0.332103} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.495023] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 852.495023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.495023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.495023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 852.495474] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3319f40-ac61-41c1-a949-e31a81494a1b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.502029] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for the task: (returnval){ [ 852.502029] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5268e0fb-0fc9-a8a8-3a82-c555a7c230a0" [ 852.502029] env[68798]: _type = "Task" [ 852.502029] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.511857] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5268e0fb-0fc9-a8a8-3a82-c555a7c230a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.730920] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6ff0bab0-960d-4654-8425-e8e890bd4749 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Acquiring lock "40be8e0f-88ab-43bc-9923-5e9e478c1cdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.731247] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6ff0bab0-960d-4654-8425-e8e890bd4749 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "40be8e0f-88ab-43bc-9923-5e9e478c1cdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.016765] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.017042] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.017350] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.048730] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.049068] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 854.062870] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 854.063353] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.063498] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) 
_cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 854.073363] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.190802] env[68798]: DEBUG nova.compute.manager [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Received event network-changed-90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 855.191090] env[68798]: DEBUG nova.compute.manager [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Refreshing instance network info cache due to event network-changed-90b8100a-f45a-40f4-917f-57d033dc4b73. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 855.191090] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] Acquiring lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.191274] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] Acquired lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.191391] env[68798]: DEBUG nova.network.neutron [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Refreshing network info cache for port 90b8100a-f45a-40f4-917f-57d033dc4b73 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.872935] env[68798]: DEBUG nova.network.neutron [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updated VIF entry in instance network info cache for port 90b8100a-f45a-40f4-917f-57d033dc4b73. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 855.872935] env[68798]: DEBUG nova.network.neutron [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updating instance_info_cache with network_info: [{"id": "90b8100a-f45a-40f4-917f-57d033dc4b73", "address": "fa:16:3e:67:b1:6d", "network": {"id": "4d532f39-e224-42c8-b1b3-623832c95222", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1700754151-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0adc61993bc741e6abd7779bdd56e719", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0636c3f6-fcb7-4954-ab07-c5cd0dee37b0", "external-id": "nsx-vlan-transportzone-857", "segmentation_id": 857, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b8100a-f4", "ovs_interfaceid": "90b8100a-f45a-40f4-917f-57d033dc4b73", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.884108] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c625a7c-1adc-4ab7-a622-890df7146265 req-792cea8a-3781-4336-bb4e-9a8dc5ae2fbc service nova] Releasing lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.086647] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 857.086925] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.045825] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.048589] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.048775] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.049139] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] 
Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 859.049139] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 859.074822] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.075340] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.075563] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.075741] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.075911] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076097] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076256] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076418] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076550] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076700] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 859.076837] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 859.077454] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.077659] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.077852] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.078031] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 861.049094] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.064503] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.065362] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.066279] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.066279] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 861.067897] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae607bf-0d4a-40e8-b920-617fc598c971 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.079765] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4149cc27-27f9-4f0b-8a9b-289cf32789da {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.100039] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07f131b-d1be-48b8-a1c2-e47ef9eed568 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.108939] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42331867-e08e-4d7e-891f-19b23c664d3b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.142324] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180758MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 861.142503] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.142945] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.376840] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.376973] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 90d9df19-2d93-4543-a650-4a624f505d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.377050] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378147] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378147] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378147] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378147] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378341] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378341] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.378341] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 861.392960] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 0913eee8-aa9a-4e1b-8aec-48d8a9197530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.415068] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf752ef-a49e-477f-8297-59621e69e712 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.433880] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 2afc987a-45cc-484e-a6f0-a0118e2e73eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.448027] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.462241] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 95cffda2-8119-4401-ac53-9d7042ef7180 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.476200] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1752a71d-8cc7-4f88-b097-53094df226e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.488250] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.508621] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.524829] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.534150] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a4174a6-bf87-4107-8382-8c0f90253d45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.547199] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.557667] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 40be8e0f-88ab-43bc-9923-5e9e478c1cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.557667] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 861.557667] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 861.583122] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 861.611024] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 861.611024] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.622697] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 861.622881] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 generation from 47 to 48 during operation: update_aggregates {{(pid=68798) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 861.644522] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 861.764190] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e602cc45-9637-4728-a8d8-a271de573a73 tempest-ServersAaction247Test-1501939504 tempest-ServersAaction247Test-1501939504-project-member] Acquiring lock "e22852fa-7480-4761-8cd2-1371d6cb1410" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.764741] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e602cc45-9637-4728-a8d8-a271de573a73 tempest-ServersAaction247Test-1501939504 tempest-ServersAaction247Test-1501939504-project-member] Lock "e22852fa-7480-4761-8cd2-1371d6cb1410" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.063140] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d833f3-2525-4e84-95c0-b75ea1ec4561 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.075728] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29d4865-d3ee-43df-9d79-adad8e7de2d0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.111999] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678d82d1-0c08-4f7b-8f70-f01b8500edb3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.120602] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c01269-4a04-413f-8bc0-8ff4f7114125 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.136322] env[68798]: DEBUG nova.compute.provider_tree [None 
req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.149096] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.169356] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 862.169517] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.027s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.272474] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d9b0a26-7ccf-4863-9ae1-303f60ee2a72 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "ca8f61e2-513d-48a0-aebd-18507eccd99d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.272743] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d9b0a26-7ccf-4863-9ae1-303f60ee2a72 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ca8f61e2-513d-48a0-aebd-18507eccd99d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.756919] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ba34f1bc-0224-4b5f-be2e-26c4e036422a tempest-InstanceActionsTestJSON-1321523912 tempest-InstanceActionsTestJSON-1321523912-project-member] Acquiring lock "dd8c777d-d724-4f7c-9516-448c4b2abb5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.757254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ba34f1bc-0224-4b5f-be2e-26c4e036422a tempest-InstanceActionsTestJSON-1321523912 tempest-InstanceActionsTestJSON-1321523912-project-member] Lock "dd8c777d-d724-4f7c-9516-448c4b2abb5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.793872] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdbabf8c-dbfb-43f5-a7f2-3862f5700175 
tempest-ServerActionsTestJSON-1499212270 tempest-ServerActionsTestJSON-1499212270-project-member] Acquiring lock "c9185203-eefd-455a-ba91-ec9797db792e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.794178] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdbabf8c-dbfb-43f5-a7f2-3862f5700175 tempest-ServerActionsTestJSON-1499212270 tempest-ServerActionsTestJSON-1499212270-project-member] Lock "c9185203-eefd-455a-ba91-ec9797db792e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.946736] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca275a74-07ed-4f4e-971e-100dba602961 tempest-ServersV294TestFqdnHostnames-1834906833 tempest-ServersV294TestFqdnHostnames-1834906833-project-member] Acquiring lock "d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.947092] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca275a74-07ed-4f4e-971e-100dba602961 tempest-ServersV294TestFqdnHostnames-1834906833 tempest-ServersV294TestFqdnHostnames-1834906833-project-member] Lock "d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.262706] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fee39f75-cceb-4822-942a-5dae54a69cca tempest-ServerDiagnosticsTest-1659991863 tempest-ServerDiagnosticsTest-1659991863-project-member] Acquiring lock "28ba427d-0034-41e3-b474-eab0eb3c794e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.263010] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fee39f75-cceb-4822-942a-5dae54a69cca tempest-ServerDiagnosticsTest-1659991863 tempest-ServerDiagnosticsTest-1659991863-project-member] Lock "28ba427d-0034-41e3-b474-eab0eb3c794e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.734852] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2309fdb9-7be8-4a10-af8d-0a2af6ec3186 tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Acquiring lock "63902532-8a21-4dbe-8315-ef6c45f88859" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.736650] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2309fdb9-7be8-4a10-af8d-0a2af6ec3186 tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Lock "63902532-8a21-4dbe-8315-ef6c45f88859" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.554087] env[68798]: DEBUG oslo_concurrency.lockutils [None req-512673bb-5670-4d46-ab51-04f1d6fa630f tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Acquiring lock "d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.555106] env[68798]: DEBUG oslo_concurrency.lockutils [None req-512673bb-5670-4d46-ab51-04f1d6fa630f tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 892.643027] env[68798]: WARNING oslo_vmware.rw_handles [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 892.643027] env[68798]: ERROR oslo_vmware.rw_handles [ 892.643027] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 892.644321] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Caching image {{(pid=68798) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 892.644729] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Copying Virtual Disk [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/c6c1543b-610c-4adc-be7a-731a3a6b0d04/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 892.645183] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77eb7070-3883-4d65-9588-2a56f1d08cb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.653983] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for the task: (returnval){ [ 892.653983] env[68798]: value = "task-4217578" [ 892.653983] env[68798]: _type = "Task" [ 892.653983] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.665773] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Task: {'id': task-4217578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.167210] env[68798]: DEBUG oslo_vmware.exceptions [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 893.167599] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.168166] env[68798]: ERROR nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.168166] env[68798]: Faults: ['InvalidArgument'] [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Traceback (most recent call last): [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] yield resources [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self.driver.spawn(context, instance, image_meta, [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self._fetch_image_if_missing(context, vi) [ 893.168166] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] image_cache(vi, tmp_image_ds_loc) [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] vm_util.copy_virtual_disk( [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] session._wait_for_task(vmdk_copy_task) [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] return self.wait_for_task(task_ref) [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] return evt.wait() [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] result = hub.switch() [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 893.168510] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] return self.greenlet.switch() [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self.f(*self.args, **self.kw) [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] raise exceptions.translate_fault(task_info.error) [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Faults: ['InvalidArgument'] [ 893.168855] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] [ 893.168855] env[68798]: INFO nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Terminating instance [ 893.170303] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.170473] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.170591] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc9f808a-811a-42fa-bb8c-344c33787132 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.173124] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.173309] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.173480] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.181546] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.181763] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 893.182576] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c968c48e-4c53-4be3-951b-885f26178e83 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.191637] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for the task: (returnval){ [ 893.191637] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52c3b6dc-1798-191b-f54d-7d8f1f09c174" [ 893.191637] env[68798]: _type = "Task" [ 893.191637] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.201321] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52c3b6dc-1798-191b-f54d-7d8f1f09c174, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.216662] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.362490] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.376612] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Releasing lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 893.377075] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 893.377317] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 893.378591] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fe2af0-7fa2-432c-b2a4-cd8b3520d920 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.388046] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 893.388317] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a224f70b-1f62-436a-b686-1df07ef1b0e3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.417575] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 893.418080] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 893.418228] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Deleting the datastore file [datastore1] 90d9df19-2d93-4543-a650-4a624f505d5d 
{{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.418594] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-282eef51-5789-4138-8a52-4b4ce24f0cd0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.427360] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for the task: (returnval){ [ 893.427360] env[68798]: value = "task-4217580" [ 893.427360] env[68798]: _type = "Task" [ 893.427360] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.435831] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Task: {'id': task-4217580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.710186] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 893.710617] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Creating directory with path [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.710912] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78f28bc1-dcac-4653-8c48-c1c2850792d1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.726079] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Created directory with path [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.726408] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Fetch image to [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 893.726592] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the 
data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 893.727440] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4d548b-83f8-4d23-a785-a4f70ee53c45 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.735807] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a042968f-c8ce-4356-95f1-415640fa211f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.750503] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ebb032-ad55-4d82-a0b0-1548e545bf0d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.781511] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4db9f82-a1e5-4880-b7d0-0de7ffa8da80 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.788729] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a81bfd58-20bf-4cff-bb84-0d77463cc297 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.812371] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 893.869510] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 893.933464] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 893.933652] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 893.938168] env[68798]: DEBUG oslo_vmware.api [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Task: {'id': task-4217580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049303} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.938484] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 893.938657] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 893.938795] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 893.938987] env[68798]: INFO nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Took 0.56 seconds to destroy the instance on the hypervisor. [ 893.939260] env[68798]: DEBUG oslo.service.loopingcall [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 893.939490] env[68798]: DEBUG nova.compute.manager [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 893.941926] env[68798]: DEBUG nova.compute.claims [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 893.941998] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.942242] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.335506] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f941bbb9-34c0-4dd7-bc07-a064c412006a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.343998] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0727486-d30c-4201-bbdb-c7814eecae46 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.378599] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3110861-8b49-4dfb-be02-28be6ca70eac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.390583] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d509cd-e3e5-49d9-a800-01d576b40e52 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.405418] env[68798]: DEBUG nova.compute.provider_tree [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.414272] env[68798]: DEBUG nova.scheduler.client.report [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.432516] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.490s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.433182] env[68798]: ERROR nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 894.433182] env[68798]: Faults: ['InvalidArgument'] [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Traceback (most recent call last): [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self.driver.spawn(context, instance, image_meta, [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self._fetch_image_if_missing(context, vi) [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] image_cache(vi, tmp_image_ds_loc) [ 894.433182] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] vm_util.copy_virtual_disk( [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] session._wait_for_task(vmdk_copy_task) [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] return self.wait_for_task(task_ref) [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 
90d9df19-2d93-4543-a650-4a624f505d5d] return evt.wait() [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] result = hub.switch() [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] return self.greenlet.switch() [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 894.433572] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] self.f(*self.args, **self.kw) [ 894.434179] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 894.434179] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] raise exceptions.translate_fault(task_info.error) [ 894.434179] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 894.434179] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Faults: ['InvalidArgument'] [ 894.434179] env[68798]: ERROR nova.compute.manager [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] [ 894.434179] env[68798]: DEBUG nova.compute.utils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 894.435622] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Build of instance 90d9df19-2d93-4543-a650-4a624f505d5d was re-scheduled: A specified parameter was not correct: fileType [ 894.435622] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 894.436025] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 894.436354] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.436509] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.436672] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.474474] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.628664] env[68798]: DEBUG nova.network.neutron [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.640167] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Releasing lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.640421] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 894.640606] env[68798]: DEBUG nova.compute.manager [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 894.735969] env[68798]: INFO nova.scheduler.client.report [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Deleted allocations for instance 90d9df19-2d93-4543-a650-4a624f505d5d [ 894.764931] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4f1a7f0c-0c4e-44ea-86a4-46d7ab6d0577 tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 278.348s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.764931] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 80.311s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.764931] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "90d9df19-2d93-4543-a650-4a624f505d5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.765313] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.765313] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.767842] env[68798]: INFO nova.compute.manager [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Terminating instance [ 894.770421] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquiring lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.770699] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 
tempest-ServerDiagnosticsV248Test-873566280-project-member] Acquired lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.770993] env[68798]: DEBUG nova.network.neutron [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.785421] env[68798]: DEBUG nova.compute.manager [None req-a5b3e2ba-4f09-41b7-a739-25a335806028 tempest-ServersTestFqdnHostnames-667902312 tempest-ServersTestFqdnHostnames-667902312-project-member] [instance: e1925875-04db-4b88-ad54-d33ff804d365] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 894.810532] env[68798]: DEBUG nova.network.neutron [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.815173] env[68798]: DEBUG nova.compute.manager [None req-a5b3e2ba-4f09-41b7-a739-25a335806028 tempest-ServersTestFqdnHostnames-667902312 tempest-ServersTestFqdnHostnames-667902312-project-member] [instance: e1925875-04db-4b88-ad54-d33ff804d365] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 894.836647] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a5b3e2ba-4f09-41b7-a739-25a335806028 tempest-ServersTestFqdnHostnames-667902312 tempest-ServersTestFqdnHostnames-667902312-project-member] Lock "e1925875-04db-4b88-ad54-d33ff804d365" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.759s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.848977] env[68798]: DEBUG nova.compute.manager [None req-de2efd34-c38e-4cea-a2b4-3595e086abdc tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] [instance: e3dfc177-6f11-48e4-bbac-83bda39fbb8d] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 894.878301] env[68798]: DEBUG nova.compute.manager [None req-de2efd34-c38e-4cea-a2b4-3595e086abdc tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] [instance: e3dfc177-6f11-48e4-bbac-83bda39fbb8d] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 894.905162] env[68798]: DEBUG oslo_concurrency.lockutils [None req-de2efd34-c38e-4cea-a2b4-3595e086abdc tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Lock "e3dfc177-6f11-48e4-bbac-83bda39fbb8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.453s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.915652] env[68798]: DEBUG nova.compute.manager [None req-5be94f04-1b87-4fbd-8c2f-a5eda8a61397 tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] [instance: ceb21573-1cb0-4af6-9897-f164997b4b5f] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 894.941113] env[68798]: DEBUG nova.compute.manager [None req-5be94f04-1b87-4fbd-8c2f-a5eda8a61397 tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] [instance: ceb21573-1cb0-4af6-9897-f164997b4b5f] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 894.966589] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5be94f04-1b87-4fbd-8c2f-a5eda8a61397 tempest-ListImageFiltersTestJSON-803056761 tempest-ListImageFiltersTestJSON-803056761-project-member] Lock "ceb21573-1cb0-4af6-9897-f164997b4b5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.044s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.978236] env[68798]: DEBUG nova.compute.manager [None req-c4865a6e-bb80-4941-ab90-c6dc9d7d36a7 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 10442eb5-8f1a-4b4c-9aab-78605de8dfea] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.013966] env[68798]: DEBUG nova.compute.manager [None req-c4865a6e-bb80-4941-ab90-c6dc9d7d36a7 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 10442eb5-8f1a-4b4c-9aab-78605de8dfea] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.047284] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4865a6e-bb80-4941-ab90-c6dc9d7d36a7 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "10442eb5-8f1a-4b4c-9aab-78605de8dfea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.706s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.059370] env[68798]: DEBUG nova.compute.manager [None req-baf1c83b-91d2-42eb-ac70-24ad6b8bed7d tempest-ServerAddressesTestJSON-68008023 tempest-ServerAddressesTestJSON-68008023-project-member] [instance: 0d4e5325-aa25-4766-a490-9719b3f354c7] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.087546] env[68798]: DEBUG nova.compute.manager [None req-baf1c83b-91d2-42eb-ac70-24ad6b8bed7d tempest-ServerAddressesTestJSON-68008023 tempest-ServerAddressesTestJSON-68008023-project-member] [instance: 0d4e5325-aa25-4766-a490-9719b3f354c7] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.110628] env[68798]: DEBUG nova.network.neutron [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.115194] env[68798]: DEBUG oslo_concurrency.lockutils [None req-baf1c83b-91d2-42eb-ac70-24ad6b8bed7d tempest-ServerAddressesTestJSON-68008023 tempest-ServerAddressesTestJSON-68008023-project-member] Lock "0d4e5325-aa25-4766-a490-9719b3f354c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.219s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.121852] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Releasing lock "refresh_cache-90d9df19-2d93-4543-a650-4a624f505d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.122464] env[68798]: DEBUG nova.compute.manager [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 895.122796] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 895.123465] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48d26cf8-cf44-4be8-8dd2-979a2fc5b608 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.128044] env[68798]: DEBUG nova.compute.manager [None req-1669284c-a9fe-4ca6-a9b9-4343d6e97bae tempest-ServerShowV257Test-1462755352 tempest-ServerShowV257Test-1462755352-project-member] [instance: 0913eee8-aa9a-4e1b-8aec-48d8a9197530] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.141469] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aea485-6c7b-454f-b4c3-8fb50d64ce52 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.164926] env[68798]: DEBUG nova.compute.manager [None req-1669284c-a9fe-4ca6-a9b9-4343d6e97bae tempest-ServerShowV257Test-1462755352 tempest-ServerShowV257Test-1462755352-project-member] [instance: 0913eee8-aa9a-4e1b-8aec-48d8a9197530] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.176962] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 90d9df19-2d93-4543-a650-4a624f505d5d could not be found. [ 895.176962] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 895.177627] env[68798]: INFO nova.compute.manager [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 895.177627] env[68798]: DEBUG oslo.service.loopingcall [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 895.178150] env[68798]: DEBUG nova.compute.manager [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 895.178150] env[68798]: DEBUG nova.network.neutron [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 895.195573] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1669284c-a9fe-4ca6-a9b9-4343d6e97bae tempest-ServerShowV257Test-1462755352 tempest-ServerShowV257Test-1462755352-project-member] Lock "0913eee8-aa9a-4e1b-8aec-48d8a9197530" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.897s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.205139] env[68798]: DEBUG nova.network.neutron [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 895.207145] env[68798]: DEBUG nova.compute.manager [None req-019f7356-5703-4475-afba-382fd1528c12 tempest-ServersListShow296Test-1039278903 tempest-ServersListShow296Test-1039278903-project-member] [instance: c59a14e2-3655-4177-961a-34552be1ccb1] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.214957] env[68798]: DEBUG nova.network.neutron [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.229705] env[68798]: INFO nova.compute.manager [-] [instance: 90d9df19-2d93-4543-a650-4a624f505d5d] Took 0.05 seconds to deallocate network for instance. [ 895.274698] env[68798]: DEBUG nova.compute.manager [None req-019f7356-5703-4475-afba-382fd1528c12 tempest-ServersListShow296Test-1039278903 tempest-ServersListShow296Test-1039278903-project-member] [instance: c59a14e2-3655-4177-961a-34552be1ccb1] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.305269] env[68798]: DEBUG oslo_concurrency.lockutils [None req-019f7356-5703-4475-afba-382fd1528c12 tempest-ServersListShow296Test-1039278903 tempest-ServersListShow296Test-1039278903-project-member] Lock "c59a14e2-3655-4177-961a-34552be1ccb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.003s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.319349] env[68798]: DEBUG nova.compute.manager [None req-cf76ca76-2f97-4be1-9463-b5a82a152b2e tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] [instance: 6cf752ef-a49e-477f-8297-59621e69e712] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.357164] env[68798]: DEBUG nova.compute.manager [None req-cf76ca76-2f97-4be1-9463-b5a82a152b2e tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] [instance: 6cf752ef-a49e-477f-8297-59621e69e712] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.384444] env[68798]: DEBUG oslo_concurrency.lockutils [None req-68a7be0e-395a-4100-9848-af8928cfc82e tempest-ServerDiagnosticsV248Test-873566280 tempest-ServerDiagnosticsV248Test-873566280-project-member] Lock "90d9df19-2d93-4543-a650-4a624f505d5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.619s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.398683] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cf76ca76-2f97-4be1-9463-b5a82a152b2e tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Lock "6cf752ef-a49e-477f-8297-59621e69e712" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.007s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.412976] env[68798]: DEBUG nova.compute.manager [None req-021d9b7c-2e90-4e9a-8f4b-903cba2310e6 tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: 2afc987a-45cc-484e-a6f0-a0118e2e73eb] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.441601] env[68798]: DEBUG nova.compute.manager [None req-021d9b7c-2e90-4e9a-8f4b-903cba2310e6 tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: 2afc987a-45cc-484e-a6f0-a0118e2e73eb] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.473585] env[68798]: DEBUG oslo_concurrency.lockutils [None req-021d9b7c-2e90-4e9a-8f4b-903cba2310e6 tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "2afc987a-45cc-484e-a6f0-a0118e2e73eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.512s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.487548] env[68798]: DEBUG nova.compute.manager [None req-1bd55f57-e1a5-490d-b901-1648df88fc08 tempest-AttachInterfacesV270Test-1172441625 tempest-AttachInterfacesV270Test-1172441625-project-member] [instance: 8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.517876] env[68798]: DEBUG nova.compute.manager [None req-1bd55f57-e1a5-490d-b901-1648df88fc08 tempest-AttachInterfacesV270Test-1172441625 tempest-AttachInterfacesV270Test-1172441625-project-member] [instance: 8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.550520] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1bd55f57-e1a5-490d-b901-1648df88fc08 tempest-AttachInterfacesV270Test-1172441625 tempest-AttachInterfacesV270Test-1172441625-project-member] Lock "8f9bdc2e-bc45-46f8-a1dd-b98046bddc2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.445s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.560687] env[68798]: DEBUG nova.compute.manager [None req-e7e3b441-781c-4adc-966a-a0d06a712e63 tempest-ServersTestBootFromVolume-2010485395 tempest-ServersTestBootFromVolume-2010485395-project-member] [instance: 95cffda2-8119-4401-ac53-9d7042ef7180] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.596135] env[68798]: DEBUG nova.compute.manager [None req-e7e3b441-781c-4adc-966a-a0d06a712e63 tempest-ServersTestBootFromVolume-2010485395 tempest-ServersTestBootFromVolume-2010485395-project-member] [instance: 95cffda2-8119-4401-ac53-9d7042ef7180] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 895.626411] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e7e3b441-781c-4adc-966a-a0d06a712e63 tempest-ServersTestBootFromVolume-2010485395 tempest-ServersTestBootFromVolume-2010485395-project-member] Lock "95cffda2-8119-4401-ac53-9d7042ef7180" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.152s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 895.638028] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 895.711072] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.711072] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.711072] env[68798]: INFO nova.compute.claims [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.200774] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cae48f-fab8-48d8-95c8-42b9d834b0a8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.214580] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf26f7b-77c2-4186-bec7-4ac0dc1067d9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.248897] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6ca9ef-8f3e-4f8d-b57a-e42d6c569895 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.257017] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdca466-91a0-4aa5-925f-73644468e53c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.273069] env[68798]: DEBUG nova.compute.provider_tree [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.283134] env[68798]: DEBUG nova.scheduler.client.report [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.300106] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 
tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.591s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.305825] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "1752a71d-8cc7-4f88-b097-53094df226e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.326023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "bd30119a-b166-4192-9822-21c26ef31c23" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.326023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "bd30119a-b166-4192-9822-21c26ef31c23" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.333261] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "bd30119a-b166-4192-9822-21c26ef31c23" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.008s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.333892] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 896.378042] env[68798]: DEBUG nova.compute.claims [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 896.378042] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.378042] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.798098] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d2ec89-dc03-4cbb-9cb7-73c76de458c5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.807247] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb356e7-0d1e-483c-85a6-5615238fec44 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.843015] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3624711d-e261-4d6e-88f2-372c3a039ddf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.853319] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29056dc1-3fbf-42ae-8c77-81467cfef3e7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.873591] env[68798]: DEBUG nova.compute.provider_tree [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.884954] env[68798]: DEBUG nova.scheduler.client.report [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.900536] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 
tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.524s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.900749] env[68798]: DEBUG nova.compute.utils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Conflict updating instance 1752a71d-8cc7-4f88-b097-53094df226e9. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 896.902206] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance disappeared during build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 896.902374] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 896.902585] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.902724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquired lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.902876] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 896.958258] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.225023] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.238422] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Releasing lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.241025] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 897.241025] env[68798]: DEBUG nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 897.241025] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 897.277863] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.287295] env[68798]: DEBUG nova.network.neutron [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.301040] env[68798]: INFO nova.compute.manager [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Took 0.06 seconds to deallocate network for instance. 
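The "Acquiring lock ... by ..." / "acquired ... waited" / ""released" ... held" triplets around compute_resources in the entries above are emitted by oslo.concurrency's lockutils wrappers, which time how long each caller waited for and then held a named semaphore. A minimal sketch of the same pattern using the public lockutils API (the function and lock names below are illustrative placeholders, not Nova's actual resource-tracker code):

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the named lock and, at DEBUG level,
    # logs the 'Acquiring lock "..." by "..."' / 'acquired ... waited' /
    # '"released" ... held' lines seen above (lockutils.py "inner").
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        # ... record the claim against in-memory resource usage ...
        return instance

    # Context-manager form of the same primitive; this is what produces the
    # plain 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' lines for
    # the refresh_cache-<uuid> locks in this log.
    def abort_instance_claim(instance):
        with lockutils.lock('compute_resources'):
            # ... roll back whatever instance_claim() recorded ...
            pass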
[ 897.430322] env[68798]: INFO nova.scheduler.client.report [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Deleted allocations for instance 1752a71d-8cc7-4f88-b097-53094df226e9 [ 897.431590] env[68798]: DEBUG oslo_concurrency.lockutils [None req-11690676-4ed7-4255-b170-2bbf104dc039 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.034s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.438772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.130s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.438772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "1752a71d-8cc7-4f88-b097-53094df226e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.438772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.438772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.439644] env[68798]: INFO nova.compute.manager [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Terminating instance [ 897.443667] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquiring lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.443667] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Acquired lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.443667] env[68798]: DEBUG nova.network.neutron [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.450067] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 897.496244] env[68798]: DEBUG nova.network.neutron [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.533176] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.533603] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.536501] env[68798]: INFO nova.compute.claims [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.021233] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92933531-2e97-4ccf-b71b-60d3ff057d88 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.030300] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fa0081-5c00-4b6b-91af-f570ae0336d8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.064448] env[68798]: DEBUG nova.network.neutron [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.068023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab78f62-570a-4fcf-9275-159177e794c0 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.074809] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187d7440-0f84-46ef-a89f-f6b302b044d5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.080009] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Releasing lock "refresh_cache-1752a71d-8cc7-4f88-b097-53094df226e9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.080426] env[68798]: DEBUG nova.compute.manager [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 898.080877] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 898.081933] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27811f2c-833a-4d35-bc9f-a182c98fd723 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.096421] env[68798]: DEBUG nova.compute.provider_tree [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.107587] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d0bc47-68ae-4e28-9b58-1c4535b1e75d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.117944] env[68798]: DEBUG nova.scheduler.client.report [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.145984] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1752a71d-8cc7-4f88-b097-53094df226e9 could not be found. 
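The repeated "Invoking PropertyCollector.RetrievePropertiesEx" and "Invoking SearchIndex.FindAllByUuid" entries are individual SOAP requests issued through the oslo.vmware session established at driver startup. A rough sketch of that call path using oslo.vmware's public API follows; the endpoint and credentials are placeholders, not the values used by this deployment:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder vCenter endpoint and credentials; in this log they come
    # from the nova.conf [vmware] section.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.com', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() wraps one SOAP request, retrying transient faults and
    # re-authenticating if the session expired; each RetrievePropertiesEx
    # line above is one such call made on the session's behalf.
    root_name = session.invoke_api(
        vim_util, 'get_object_property',
        session.vim, session.vim.service_content.rootFolder, 'name')

    # Long-running operations such as Folder.CreateVM_Task return a task
    # reference that is polled via session.wait_for_task(), which produces
    # the "progress is 0% ... completed successfully" lines further down.
    # task_ref = ...  (returned by a *_Task SOAP call)
    # result = session.wait_for_task(task_ref)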
[ 898.146480] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 898.146831] env[68798]: INFO nova.compute.manager [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Took 0.07 seconds to destroy the instance on the hypervisor. [ 898.147271] env[68798]: DEBUG oslo.service.loopingcall [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.149359] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.615s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.149359] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 898.153742] env[68798]: DEBUG nova.compute.manager [-] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 898.153893] env[68798]: DEBUG nova.network.neutron [-] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 898.197547] env[68798]: DEBUG nova.network.neutron [-] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.206134] env[68798]: DEBUG nova.compute.utils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 898.207569] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 898.207747] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 898.211668] env[68798]: DEBUG nova.network.neutron [-] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.216595] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 898.223076] env[68798]: INFO nova.compute.manager [-] [instance: 1752a71d-8cc7-4f88-b097-53094df226e9] Took 0.07 seconds to deallocate network for instance. [ 898.289285] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 898.322053] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.322311] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.322470] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.323236] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 
tempest-DeleteServersAdminTestJSON-1445962786-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.323236] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.323236] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.323236] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.323368] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 898.323528] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.323704] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.323925] env[68798]: DEBUG nova.virt.hardware [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.325687] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cba4de-513e-41e8-a38b-eb64ab36508a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.334388] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f45528-9f6f-409e-8b59-88666f47f4fb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.353895] env[68798]: DEBUG oslo_concurrency.lockutils [None req-50bbba3d-5463-4b17-aed2-a7917fcefa55 tempest-ServerGroupTestJSON-571888976 tempest-ServerGroupTestJSON-571888976-project-member] Lock "1752a71d-8cc7-4f88-b097-53094df226e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
0.918s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.443490] env[68798]: DEBUG nova.policy [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3590574e588843f0b7185e18fa92ccb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e57669da30594d4abf484262539ea414', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 899.153629] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Successfully created port: 5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.738227] env[68798]: DEBUG nova.compute.manager [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Received event network-vif-plugged-5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 900.738818] env[68798]: DEBUG oslo_concurrency.lockutils [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] Acquiring lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.739084] env[68798]: DEBUG oslo_concurrency.lockutils [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.739208] env[68798]: DEBUG oslo_concurrency.lockutils [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.739385] env[68798]: DEBUG nova.compute.manager [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] No waiting events found dispatching network-vif-plugged-5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 900.739739] env[68798]: WARNING nova.compute.manager [req-015c5de7-e149-4f00-8501-8079e26a8416 req-e0e5f5cd-0624-434c-a8d5-162d284c4360 service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Received unexpected event network-vif-plugged-5028cced-8128-4d97-8006-4fc9fd47b58e for instance with vm_state building and task_state spawning. 
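The nova.virt.hardware entries a few records back ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate the sockets/cores/threads factorizations of the flavor's vCPU count that fit the flavor and image limits. A simplified, self-contained illustration of that enumeration, not Nova's actual implementation:

    # Enumerate all (sockets, cores, threads) splits of `vcpus` within the
    # given limits; with the effectively unbounded 65536 limits logged above,
    # a 1-vCPU flavor yields exactly one topology: (1, 1, 1).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- matches the single topology logged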
[ 900.968533] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Successfully updated port: 5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.991254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.991387] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.991546] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.076712] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.325167] env[68798]: DEBUG nova.compute.manager [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Received event network-changed-5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 901.325167] env[68798]: DEBUG nova.compute.manager [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Refreshing instance network info cache due to event network-changed-5028cced-8128-4d97-8006-4fc9fd47b58e. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 901.325167] env[68798]: DEBUG oslo_concurrency.lockutils [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] Acquiring lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.382211] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Updating instance_info_cache with network_info: [{"id": "5028cced-8128-4d97-8006-4fc9fd47b58e", "address": "fa:16:3e:bc:31:c7", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5028cced-81", "ovs_interfaceid": "5028cced-8128-4d97-8006-4fc9fd47b58e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.402661] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.402998] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance network_info: |[{"id": "5028cced-8128-4d97-8006-4fc9fd47b58e", "address": "fa:16:3e:bc:31:c7", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5028cced-81", 
"ovs_interfaceid": "5028cced-8128-4d97-8006-4fc9fd47b58e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.403322] env[68798]: DEBUG oslo_concurrency.lockutils [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] Acquired lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.403545] env[68798]: DEBUG nova.network.neutron [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Refreshing network info cache for port 5028cced-8128-4d97-8006-4fc9fd47b58e {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 901.407383] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:31:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5028cced-8128-4d97-8006-4fc9fd47b58e', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.413293] env[68798]: DEBUG oslo.service.loopingcall [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.414076] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 901.414326] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e772d4f-2c6d-490d-b71d-5f3550045596 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.441681] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.441681] env[68798]: value = "task-4217581" [ 901.441681] env[68798]: _type = "Task" [ 901.441681] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.450837] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217581, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.882218] env[68798]: DEBUG nova.network.neutron [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Updated VIF entry in instance network info cache for port 5028cced-8128-4d97-8006-4fc9fd47b58e. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 901.882630] env[68798]: DEBUG nova.network.neutron [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Updating instance_info_cache with network_info: [{"id": "5028cced-8128-4d97-8006-4fc9fd47b58e", "address": "fa:16:3e:bc:31:c7", "network": {"id": "579f6646-7b99-4b4f-a7b6-3ef78f94147b", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.63", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cc340e748dae4a43b16acfcfeecd7cd0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5028cced-81", "ovs_interfaceid": "5028cced-8128-4d97-8006-4fc9fd47b58e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.892988] env[68798]: DEBUG oslo_concurrency.lockutils [req-d9b1ed8e-1283-49e6-be0a-05f18df777fa req-a3937915-6cf8-4f1b-b830-5fbe94cde23c service nova] Releasing lock "refresh_cache-fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.953778] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217581, 'name': CreateVM_Task, 'duration_secs': 0.356874} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.954191] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 901.955076] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.956620] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.956620] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 901.956620] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f397b443-561a-4188-b8a1-07832e250667 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.961974] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 901.961974] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52868151-c2c2-aeb2-cc3b-5b59f8dda9bd" [ 901.961974] env[68798]: _type = "Task" [ 901.961974] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.972085] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52868151-c2c2-aeb2-cc3b-5b59f8dda9bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.985492] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.431801] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.431801] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.475927] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.476176] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.476393] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.124112] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdc8455a-855c-4119-bb4a-a15628227e40 tempest-FloatingIPsAssociationTestJSON-1950445159 tempest-FloatingIPsAssociationTestJSON-1950445159-project-member] Acquiring lock "fd8e2ed6-f349-48a5-88db-54dd535587cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.124514] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdc8455a-855c-4119-bb4a-a15628227e40 tempest-FloatingIPsAssociationTestJSON-1950445159 tempest-FloatingIPsAssociationTestJSON-1950445159-project-member] Lock "fd8e2ed6-f349-48a5-88db-54dd535587cf" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.169831] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 918.170246] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.044729] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.048062] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 919.048062] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.049032] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.049032] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 920.049032] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 920.073267] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.073456] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.073573] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.073691] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.073815] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.073938] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.074072] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.074195] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.074313] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.074508] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 920.074637] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 920.075208] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.075391] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 920.075522] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 921.070436] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.048649] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.063742] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.064232] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.064348] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.064514] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 922.066216] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b580cae4-3588-421f-9910-67588bec3eb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.075370] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f2c20a-d8c2-439d-94a2-edbf18bf3990 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.091172] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a821ae45-ff8e-4e8b-b319-94c89c364a2b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.098796] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50582e8e-909e-4e30-b44e-4ee2f3fd1fa2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.130586] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180747MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 922.130763] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.130972] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.210155] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210327] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210454] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210574] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210690] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210804] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.210919] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.211041] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.211814] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.211814] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 922.224296] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.235486] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.246862] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a4174a6-bf87-4107-8382-8c0f90253d45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.257213] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.267815] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 40be8e0f-88ab-43bc-9923-5e9e478c1cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.278680] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e22852fa-7480-4761-8cd2-1371d6cb1410 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.289342] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca8f61e2-513d-48a0-aebd-18507eccd99d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.299477] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dd8c777d-d724-4f7c-9516-448c4b2abb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.309557] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c9185203-eefd-455a-ba91-ec9797db792e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.319750] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.329420] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 28ba427d-0034-41e3-b474-eab0eb3c794e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.339168] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 63902532-8a21-4dbe-8315-ef6c45f88859 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.348497] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.359841] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.369695] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fd8e2ed6-f349-48a5-88db-54dd535587cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.369925] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 922.370085] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 922.677892] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310121b5-30d6-4bc5-9db7-1e677dc2a045 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.686955] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4389cf7-d6fc-44ac-b625-b580cbe76020 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.716427] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780e626a-afd0-4a0b-80f0-9efe088bda6a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.724465] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a851dd6a-d4c0-49e6-a59a-644a7b799e21 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.738211] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.747842] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 922.763299] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 922.763512] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.633s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.443030] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 940.443030] env[68798]: ERROR oslo_vmware.rw_handles [ 940.443681] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 940.445288] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 940.445540] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Copying Virtual Disk [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/4b718706-3564-4eec-a2ab-d9c07678ffd3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 940.445823] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de2db0fb-b557-4e67-8cf3-e16f3df8b867 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.454209] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for the task: (returnval){ [ 940.454209] env[68798]: value = "task-4217582" [ 940.454209] env[68798]: _type = "Task" [ 940.454209] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.463154] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Task: {'id': task-4217582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.965783] env[68798]: DEBUG oslo_vmware.exceptions [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 940.966095] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.966691] env[68798]: ERROR nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 940.966691] env[68798]: Faults: ['InvalidArgument'] [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Traceback (most recent call last): [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] yield resources [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self.driver.spawn(context, instance, image_meta, [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self._fetch_image_if_missing(context, vi) [ 940.966691] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] image_cache(vi, tmp_image_ds_loc) [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] vm_util.copy_virtual_disk( [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] session._wait_for_task(vmdk_copy_task) [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return self.wait_for_task(task_ref) [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return evt.wait() [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] result = hub.switch() [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 940.967368] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return self.greenlet.switch() [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self.f(*self.args, **self.kw) [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] raise exceptions.translate_fault(task_info.error) [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Faults: ['InvalidArgument'] [ 940.968083] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] [ 940.968083] env[68798]: INFO nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Terminating instance [ 940.968603] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.968848] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 940.969117] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68678abf-b1d4-47f1-ac87-8962a452066f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.972575] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 940.972764] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 940.974085] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ce91c9-27f4-47ee-b8c4-77dba1ea12ac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.980953] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 940.981197] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-88fd9f83-bc2b-497d-94ba-0f086feb7bc9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.984431] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 940.984533] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 940.985500] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcae2189-1e45-4cf9-99ef-7a4efe0300d9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.991770] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for the task: (returnval){ [ 940.991770] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52c7e8de-bcfa-c2db-bc5d-280b3e04bf57" [ 940.991770] env[68798]: _type = "Task" [ 940.991770] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.004908] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52c7e8de-bcfa-c2db-bc5d-280b3e04bf57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.054607] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 941.054927] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 941.055292] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Deleting the datastore file [datastore1] c1606420-0fd3-4bd3-a8fa-91772c11f9bf {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.055627] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7eeac8b5-cc49-402b-9400-cc0ad4ee7b2d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.065558] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for the task: (returnval){ [ 941.065558] env[68798]: value = "task-4217584" [ 941.065558] env[68798]: _type = "Task" [ 941.065558] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.503043] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 941.503356] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Creating directory with path [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 941.503585] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-997dcc95-9e37-4444-b0f7-147d4d8e74cc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.515935] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Created directory with path [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 941.516153] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Fetch image to [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 941.516354] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 941.517209] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe584782-828a-4ba9-95c2-466d95d76157 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.524768] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03acdf6-e5d5-438e-bbe7-f772eb52e78b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.534439] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2906ca4c-4912-4dc3-bbb9-9fd59f4372a4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.565463] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ce4103-c731-44f9-b2be-64c6058f803d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.577409] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1874fae2-efa3-456d-a118-2cae480e9e7f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.579229] env[68798]: DEBUG oslo_vmware.api [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Task: {'id': task-4217584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083944} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.579473] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.579653] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 941.579828] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 941.580007] env[68798]: INFO nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Took 0.61 seconds to destroy the instance on the hypervisor. 
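The SearchDatastore_Task and DeleteDatastoreFile_Task waits above all go through oslo.vmware's wait_for_task/_poll_task helpers, which emit the "Waiting for the task ... to complete" and "progress is N%" entries and, per the tracebacks in this log, drive the poll with a looping call and translate vSphere faults into oslo_vmware exceptions. As a rough mental model only, the sketch below reproduces that polling pattern in plain Python; fetch_task_info, its return shape, and the interval/timeout defaults are illustrative assumptions, not oslo.vmware's actual API.

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll fetch_task_info() until the task succeeds, fails, or times out.

    fetch_task_info is a hypothetical callable assumed to return a dict like
    {"state": "running"|"success"|"error", "progress": int, "error": str|None}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error") or "task failed")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete within %.0fs" % timeout)
        # Mirrors the "progress is N%" entries above: report, sleep, poll again.
        print("progress is %d%%" % info.get("progress", 0))
        time.sleep(poll_interval)

# Example: a fake task that completes on the third poll.
_calls = {"n": 0}
def _fake_task():
    _calls["n"] += 1
    return {"state": "success" if _calls["n"] >= 3 else "running",
            "progress": min(100, _calls["n"] * 50)}

wait_for_task(_fake_task, poll_interval=0.01)

The real helper blocks the caller on an event that the looping call signals once the task reaches a terminal state, which is consistent with the CopyVirtualDisk_Task failure in this log surfacing as a VimFaultException raised out of _poll_task rather than out of the caller's own loop.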
[ 941.582674] env[68798]: DEBUG nova.compute.claims [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 941.582815] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.583037] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.606460] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 941.719859] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 941.780058] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 941.780262] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 942.004570] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378aa86c-9b89-4ac8-8680-13c77b803a49 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.012817] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d11430-7492-4fcc-9ac1-fadf1eced65c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.042496] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cf5751-7713-4e0b-ab9d-12a460070030 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.050626] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2d43c1-818b-475f-890f-b29cdd76977d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.063981] env[68798]: DEBUG nova.compute.provider_tree [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.073234] env[68798]: DEBUG nova.scheduler.client.report [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.092062] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.509s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.092637] env[68798]: ERROR nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.092637] env[68798]: Faults: ['InvalidArgument'] [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Traceback (most recent call last): [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 942.092637] env[68798]: ERROR 
nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self.driver.spawn(context, instance, image_meta, [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self._fetch_image_if_missing(context, vi) [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] image_cache(vi, tmp_image_ds_loc) [ 942.092637] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] vm_util.copy_virtual_disk( [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] session._wait_for_task(vmdk_copy_task) [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return self.wait_for_task(task_ref) [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return evt.wait() [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] result = hub.switch() [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] return self.greenlet.switch() [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 942.092994] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] self.f(*self.args, **self.kw) [ 942.093358] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 942.093358] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] raise exceptions.translate_fault(task_info.error) [ 942.093358] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.093358] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Faults: ['InvalidArgument'] [ 942.093358] env[68798]: ERROR nova.compute.manager [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] [ 942.093553] env[68798]: DEBUG nova.compute.utils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 942.094964] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Build of instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf was re-scheduled: A specified parameter was not correct: fileType [ 942.094964] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 942.095363] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 942.095505] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 942.095667] env[68798]: DEBUG nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 942.095830] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.432458] env[68798]: DEBUG nova.network.neutron [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.447214] env[68798]: INFO nova.compute.manager [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Took 0.35 seconds to deallocate network for instance. [ 942.550512] env[68798]: INFO nova.scheduler.client.report [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Deleted allocations for instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf [ 942.571263] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8dbbce0b-e9a4-46f8-a87d-c332b425c31a tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 327.091s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.572379] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 128.861s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.572612] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Acquiring lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.572821] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.572990] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.575105] env[68798]: INFO nova.compute.manager [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Terminating instance [ 942.577263] env[68798]: DEBUG nova.compute.manager [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 942.577484] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 942.577748] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aafbf5b1-5e0f-4a4f-a6d8-37071e4ea664 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.583546] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 942.590224] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e947e018-1179-4e72-83a7-afa277f8289a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.620348] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1606420-0fd3-4bd3-a8fa-91772c11f9bf could not be found. [ 942.620570] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.620748] env[68798]: INFO nova.compute.manager [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Took 0.04 seconds to destroy the instance on the hypervisor. 
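The traceback earlier in this run ends inside oslo.vmware's task polling: copy_virtual_disk only fails once the vCenter task reports an error, and _poll_task raises exceptions.translate_fault(task_info.error), which surfaces as the VimFaultException ("A specified parameter was not correct: fileType") that aborts the build. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info() helper and TaskFault class — an illustration of the mechanism, not the oslo.vmware implementation:

import time

class TaskFault(Exception):
    """Stand-in for the fault raised when a polled task reports an error."""

def wait_for_task(task_ref, get_task_info, interval=0.5, timeout=300.0):
    """Poll task_ref until success, error, or timeout (illustrative only)."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)   # hypothetical: returns {'state': ..., 'error': ...}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # This is the step where a vCenter fault such as
            # "A specified parameter was not correct: fileType" becomes an exception.
            raise TaskFault(info.get('error', 'task failed'))
        time.sleep(interval)             # queued/running: keep polling
    raise TimeoutError('task %s did not complete within %ss' % (task_ref, timeout))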
[ 942.620993] env[68798]: DEBUG oslo.service.loopingcall [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.623449] env[68798]: DEBUG nova.compute.manager [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 942.623554] env[68798]: DEBUG nova.network.neutron [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.638120] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.638372] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.639980] env[68798]: INFO nova.compute.claims [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.655339] env[68798]: DEBUG nova.network.neutron [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.671220] env[68798]: INFO nova.compute.manager [-] [instance: c1606420-0fd3-4bd3-a8fa-91772c11f9bf] Took 0.05 seconds to deallocate network for instance. 
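The lockutils records throughout this run follow one shape: "Acquiring", then "acquired ... waited Xs", then "released ... held Ys" (for example the roughly half-second hold of "compute_resources" during the instance claim just below). A simplified plain-Python sketch of a named-lock decorator that produces that pattern; it is illustrative only, not the oslo.concurrency code that actually emits these messages:

import functools
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_LOCKS: dict[str, threading.Lock] = {}

def synchronized(name: str):
    """Serialize callers on a named lock and log wait/hold durations."""
    lock = _LOCKS.setdefault(name, threading.Lock())

    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            t0 = time.monotonic()
            with lock:
                LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                          name, func.__qualname__, time.monotonic() - t0)
                t1 = time.monotonic()
                try:
                    return func(*args, **kwargs)
                finally:
                    LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                              name, func.__qualname__, time.monotonic() - t1)
        return inner
    return decorator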
[ 942.817230] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f69061dd-0132-4b60-a66c-7f013400a936 tempest-ServerActionsTestOtherB-368266379 tempest-ServerActionsTestOtherB-368266379-project-member] Lock "c1606420-0fd3-4bd3-a8fa-91772c11f9bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.244s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.051320] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd9c5f7-4185-4240-b8ed-10057636f3cd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.059730] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed57fb80-c139-4e4f-8f1c-8f4b5ad37f92 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.100026] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f08287-e232-4f12-8fbc-2c6bc778bda0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.105853] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab74cae0-7b1f-4c8c-a631-2c655dd6f5ca {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.120636] env[68798]: DEBUG nova.compute.provider_tree [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.133128] env[68798]: DEBUG nova.scheduler.client.report [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.152036] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.512s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.152036] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 943.205146] env[68798]: DEBUG nova.compute.utils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 943.209027] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 943.209027] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 943.219280] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 943.303663] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 943.320908] env[68798]: DEBUG nova.policy [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4591576f20d142a0a68342f8a1c9bfc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5b33dbd010340649a5c38226ec87f36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 943.334106] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 943.334420] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.334710] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.335016] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.335275] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.335557] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.335882] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.336175] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.336466] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.336728] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.337029] env[68798]: DEBUG nova.virt.hardware [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.337949] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd67776-0f2b-445b-9b4c-1efc9cfefcf4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.347426] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d14bef-0311-4dc3-8cfc-ec8d5d8e1fae {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.743316] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Successfully created port: 1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 944.986913] env[68798]: DEBUG nova.compute.manager [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Received event network-vif-plugged-1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 944.987220] env[68798]: DEBUG oslo_concurrency.lockutils [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] Acquiring lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.987351] env[68798]: DEBUG oslo_concurrency.lockutils [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] Lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.987556] env[68798]: DEBUG oslo_concurrency.lockutils [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] Lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.987747] env[68798]: DEBUG nova.compute.manager [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] No waiting events found dispatching network-vif-plugged-1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 944.987926] env[68798]: WARNING nova.compute.manager [req-78249132-c7e0-4b03-83e3-d7dfb7f60285 req-c26bda03-e493-49ba-a486-25eaba008732 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Received unexpected event network-vif-plugged-1fe1d732-827e-4139-a377-f4e42a8f213c for instance with vm_state building and task_state spawning. 
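The two records above show the usual handshake for port 1fe1d732-827e-4139-a377-f4e42a8f213c: Neutron reports network-vif-plugged, but because no waiter has been registered yet the compute manager logs "No waiting events found" and flags the event as unexpected while the instance is still building/spawning. A hedged sketch of such an event registry, using illustrative names rather than Nova's actual InstanceEvents API:

import threading

class EventRegistry:
    """Match externally delivered events (e.g. network-vif-plugged-<port>) to waiters."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}

    def prepare(self, tag: str) -> threading.Event:
        """Register interest before starting the operation that triggers the event."""
        with self._lock:
            return self._waiters.setdefault(tag, threading.Event())

    def dispatch(self, tag: str) -> bool:
        """Deliver an external event; returns False when nobody was waiting."""
        with self._lock:
            evt = self._waiters.pop(tag, None)
        if evt is None:
            # Corresponds to "No waiting events found dispatching ..." followed by
            # the "Received unexpected event ..." warning in the log.
            return False
        evt.set()
        return True

# Usage sketch: the spawning thread calls registry.prepare(tag) before plugging the
# VIF and then evt.wait(timeout=...); the Neutron notification path calls
# registry.dispatch(tag) when the event arrives.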
[ 944.994846] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Successfully updated port: 1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.012336] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.012483] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.012633] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.106108] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.524178] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Updating instance_info_cache with network_info: [{"id": "1fe1d732-827e-4139-a377-f4e42a8f213c", "address": "fa:16:3e:96:f2:3b", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe1d732-82", "ovs_interfaceid": "1fe1d732-827e-4139-a377-f4e42a8f213c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.541401] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock 
"refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.541734] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance network_info: |[{"id": "1fe1d732-827e-4139-a377-f4e42a8f213c", "address": "fa:16:3e:96:f2:3b", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe1d732-82", "ovs_interfaceid": "1fe1d732-827e-4139-a377-f4e42a8f213c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.542330] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:f2:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fe1d732-827e-4139-a377-f4e42a8f213c', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.551526] env[68798]: DEBUG oslo.service.loopingcall [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.551920] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 945.552195] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3255cbdf-5ba8-4cc6-a395-76132d38fc07 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.575733] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.575733] env[68798]: value = "task-4217585" [ 945.575733] env[68798]: _type = "Task" [ 945.575733] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.585086] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217585, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.086491] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217585, 'name': CreateVM_Task} progress is 99%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.588778] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217585, 'name': CreateVM_Task} progress is 99%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.051266] env[68798]: DEBUG nova.compute.manager [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Received event network-changed-1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 947.051484] env[68798]: DEBUG nova.compute.manager [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Refreshing instance network info cache due to event network-changed-1fe1d732-827e-4139-a377-f4e42a8f213c. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 947.051708] env[68798]: DEBUG oslo_concurrency.lockutils [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] Acquiring lock "refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.051885] env[68798]: DEBUG oslo_concurrency.lockutils [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] Acquired lock "refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.052069] env[68798]: DEBUG nova.network.neutron [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Refreshing network info cache for port 1fe1d732-827e-4139-a377-f4e42a8f213c {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.088865] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217585, 'name': CreateVM_Task, 'duration_secs': 1.322112} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.089080] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 947.089755] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.089917] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.090275] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 947.090531] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65b3cfc7-f63e-491e-88f8-4ce810e77dfd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.096376] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 947.096376] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52eb0c39-6207-fd2b-c5de-2154dccfa999" [ 947.096376] env[68798]: _type = "Task" [ 947.096376] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.105345] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52eb0c39-6207-fd2b-c5de-2154dccfa999, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.428085] env[68798]: DEBUG nova.network.neutron [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Updated VIF entry in instance network info cache for port 1fe1d732-827e-4139-a377-f4e42a8f213c. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 947.428445] env[68798]: DEBUG nova.network.neutron [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Updating instance_info_cache with network_info: [{"id": "1fe1d732-827e-4139-a377-f4e42a8f213c", "address": "fa:16:3e:96:f2:3b", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fe1d732-82", "ovs_interfaceid": "1fe1d732-827e-4139-a377-f4e42a8f213c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.440420] env[68798]: DEBUG oslo_concurrency.lockutils [req-278d205f-45e6-4b85-b8c6-81839cb4b308 req-991438e3-354b-4ec6-ac09-b0267c789be7 service nova] Releasing lock "refresh_cache-e848c3f4-64ff-4956-88e0-afa27be73068" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.610035] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.610327] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.610547] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.661890] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e848c3f4-64ff-4956-88e0-afa27be73068" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.847202] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.847522] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.764875] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.049079] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.049323] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.049056] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.049417] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.044347] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.048055] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.048278] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 981.048419] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9924}} [ 981.072619] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.072918] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.072918] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073063] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073193] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073317] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073437] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073555] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073672] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073786] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 981.073976] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 982.048856] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 982.048856] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 984.048620] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 984.061186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.061481] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.061687] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.061864] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 984.063174] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dafecb8-fd8a-493f-af96-339fdce8ba1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.073279] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288eedd5-d97d-41d7-984c-6f37d5fa4416 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.089162] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f82d163-b56e-46b8-8c8a-9836e5765a84 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.096597] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27957241-a69f-482e-9c73-8657a2854372 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.128186] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180768MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 984.128366] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.128572] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.205581] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.205773] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e060aaea-7508-46ed-8786-b5753fde75e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.205903] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206037] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206163] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206283] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206400] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206514] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206626] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.206738] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 984.219876] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.231066] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a4174a6-bf87-4107-8382-8c0f90253d45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.242878] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.253208] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 40be8e0f-88ab-43bc-9923-5e9e478c1cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.264251] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e22852fa-7480-4761-8cd2-1371d6cb1410 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.275717] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca8f61e2-513d-48a0-aebd-18507eccd99d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.287608] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dd8c777d-d724-4f7c-9516-448c4b2abb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.297773] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c9185203-eefd-455a-ba91-ec9797db792e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.308435] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.319921] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 28ba427d-0034-41e3-b474-eab0eb3c794e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.329847] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 63902532-8a21-4dbe-8315-ef6c45f88859 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.342096] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.352978] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.363333] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fd8e2ed6-f349-48a5-88db-54dd535587cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.374937] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 984.375228] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 984.375379] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 984.705696] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c97442c-d561-4bf9-891b-7e4c8709ebdb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.713940] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7fb9e6-a62e-49a1-9a3e-377479ef7cb3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.744307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1e83ab-e080-4516-b038-4bef5f8d76fd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.752828] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9877b443-d78a-43b9-b046-47565bb9c537 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.769172] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.781571] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.796276] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 984.796480] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.668s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.621472] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 987.621472] env[68798]: ERROR oslo_vmware.rw_handles [ 987.622153] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 987.623813] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 987.624080] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Copying Virtual Disk [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/77071a9b-2eee-47dc-8ddd-dc833d3333e8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 987.624361] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43c7d1cf-5461-4785-a0cd-20e730621b91 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.633825] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for the task: (returnval){ [ 987.633825] env[68798]: value = "task-4217586" [ 987.633825] 
env[68798]: _type = "Task" [ 987.633825] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.642508] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Task: {'id': task-4217586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.144571] env[68798]: DEBUG oslo_vmware.exceptions [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 988.144891] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 988.145524] env[68798]: ERROR nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 988.145524] env[68798]: Faults: ['InvalidArgument'] [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Traceback (most recent call last): [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] yield resources [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self.driver.spawn(context, instance, image_meta, [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self._fetch_image_if_missing(context, vi) [ 988.145524] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] 
image_cache(vi, tmp_image_ds_loc) [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] vm_util.copy_virtual_disk( [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] session._wait_for_task(vmdk_copy_task) [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return self.wait_for_task(task_ref) [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return evt.wait() [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] result = hub.switch() [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 988.145889] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return self.greenlet.switch() [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self.f(*self.args, **self.kw) [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] raise exceptions.translate_fault(task_info.error) [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Faults: ['InvalidArgument'] [ 988.146270] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] [ 988.146270] env[68798]: INFO nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Terminating instance [ 988.147508] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 
tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.147718] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.147998] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ec89626-46ec-4f74-a9b2-770e0c501e5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.150394] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 988.150668] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 988.152046] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a282761b-5c7e-40f6-82b4-6a317317df43 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.160127] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 988.160369] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf7873e0-f71f-4e96-9382-ee4adf10243e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.162753] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.162923] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 988.163927] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe5b9755-7d73-4dea-ae9c-2d64fc019d20 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.169163] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for the task: (returnval){ [ 988.169163] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52862d5d-ef3b-4744-7b01-04040316695f" [ 988.169163] env[68798]: _type = "Task" [ 988.169163] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.177774] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52862d5d-ef3b-4744-7b01-04040316695f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.236042] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 988.236303] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 988.236490] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Deleting the datastore file [datastore1] 620ef3f6-0444-474d-8179-3dc0143f2e99 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 988.236861] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44dad7f3-a529-4481-ac3e-f2183980fe9e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.243842] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for the task: (returnval){ [ 988.243842] env[68798]: value = "task-4217588" [ 988.243842] env[68798]: _type = "Task" [ 988.243842] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.252334] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Task: {'id': task-4217588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.680410] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 988.680716] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Creating directory with path [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 988.680934] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-430f2993-0696-487a-be5a-2b75f9537163 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.694016] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Created directory with path [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 988.694322] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Fetch image to [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 988.694458] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 988.695315] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725fd378-7929-4118-b793-a749e0f3face {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.702852] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5228d2-b18f-4e03-bc1a-864fe2fcba97 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.712476] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175f8fe8-d208-4f55-82cb-4218dc0fc251 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.744789] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127718ec-15e9-473d-8cbb-d35329d2a0a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.756524] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ba33d518-1c95-4baa-9998-184faa5fd746 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.758359] env[68798]: DEBUG oslo_vmware.api [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Task: {'id': task-4217588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079335} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.758615] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.758797] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 988.758964] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 988.759150] env[68798]: INFO nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 988.761765] env[68798]: DEBUG nova.compute.claims [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 988.761931] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.762164] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.784080] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 988.838745] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 988.897967] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 988.898254] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 989.175995] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82546db2-0230-4832-948e-057a9d18c2ad {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.184328] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fe62c2-e07b-4ab6-a7fd-564af5fc9896 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.214180] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9551c08-3ab5-4769-aea3-d284c79908de {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.222201] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ae5e60-fc37-4b3b-97f7-3fc67f2ed4a4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.238030] env[68798]: DEBUG nova.compute.provider_tree [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.247416] env[68798]: DEBUG nova.scheduler.client.report [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.263310] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.500s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.263310] env[68798]: ERROR nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.263310] env[68798]: Faults: ['InvalidArgument'] [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Traceback (most recent call last): [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self.driver.spawn(context, instance, image_meta, [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self._vmops.spawn(context, instance, image_meta, injected_files, [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 989.263310] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self._fetch_image_if_missing(context, vi) [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] image_cache(vi, tmp_image_ds_loc) [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] vm_util.copy_virtual_disk( [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] session._wait_for_task(vmdk_copy_task) [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return self.wait_for_task(task_ref) [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return evt.wait() [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] result = hub.switch() [ 989.263547] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] return self.greenlet.switch() [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] self.f(*self.args, **self.kw) [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 
620ef3f6-0444-474d-8179-3dc0143f2e99] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] raise exceptions.translate_fault(task_info.error) [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Faults: ['InvalidArgument'] [ 989.264014] env[68798]: ERROR nova.compute.manager [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] [ 989.264014] env[68798]: DEBUG nova.compute.utils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 989.264961] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Build of instance 620ef3f6-0444-474d-8179-3dc0143f2e99 was re-scheduled: A specified parameter was not correct: fileType [ 989.264961] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 989.265357] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 989.265536] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 989.265703] env[68798]: DEBUG nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 989.265882] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 989.640413] env[68798]: DEBUG nova.network.neutron [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.653374] env[68798]: INFO nova.compute.manager [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Took 0.39 seconds to deallocate network for instance. [ 989.761060] env[68798]: INFO nova.scheduler.client.report [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Deleted allocations for instance 620ef3f6-0444-474d-8179-3dc0143f2e99 [ 989.785892] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14a00b4b-79f4-493f-8a25-ef6faff9355a tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 363.339s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.788161] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 164.631s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.788161] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Acquiring lock "620ef3f6-0444-474d-8179-3dc0143f2e99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.788161] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock
"620ef3f6-0444-474d-8179-3dc0143f2e99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.788391] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.790544] env[68798]: INFO nova.compute.manager [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Terminating instance [ 989.792171] env[68798]: DEBUG nova.compute.manager [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 989.792395] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 989.793130] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18bf66ca-44c5-4364-ab8c-da9949979cae {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.803203] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06cd5bc1-6198-4440-a30c-390289fb9390 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.814463] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 989.839334] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 620ef3f6-0444-474d-8179-3dc0143f2e99 could not be found. 
[ 989.839630] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 989.839888] env[68798]: INFO nova.compute.manager [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Took 0.05 seconds to destroy the instance on the hypervisor. [ 989.840226] env[68798]: DEBUG oslo.service.loopingcall [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 989.840503] env[68798]: DEBUG nova.compute.manager [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 989.840717] env[68798]: DEBUG nova.network.neutron [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 989.867982] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.868267] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.869771] env[68798]: INFO nova.compute.claims [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.881904] env[68798]: DEBUG nova.network.neutron [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.899876] env[68798]: INFO nova.compute.manager [-] [instance: 620ef3f6-0444-474d-8179-3dc0143f2e99] Took 0.06 seconds to deallocate network for instance. 
[ 989.994880] env[68798]: DEBUG oslo_concurrency.lockutils [None req-46fe5e63-4ba9-42ef-907e-1fa0bf004168 tempest-InstanceActionsNegativeTestJSON-821011658 tempest-InstanceActionsNegativeTestJSON-821011658-project-member] Lock "620ef3f6-0444-474d-8179-3dc0143f2e99" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.207s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.230796] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2834a199-d5c6-48e2-8766-ae62eb93f5e1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.238925] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413b8a24-9ece-4b31-8d32-7c7186fd8240 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.269487] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39da0d7-cd6e-4f22-8a74-f5cf1cd0c831 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.278356] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f360f37-fa66-42f1-9752-6583598aeb39 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.292283] env[68798]: DEBUG nova.compute.provider_tree [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.300977] env[68798]: DEBUG nova.scheduler.client.report [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 990.316206] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.448s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.316709] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 990.358244] env[68798]: DEBUG nova.compute.utils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.359578] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 990.359990] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 990.368592] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 990.435588] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 990.471154] env[68798]: DEBUG nova.policy [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34dd9d8cfc5f4b24a1883837638437e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4827ca551c984f14bad7f5dbf5f6d1c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 990.474768] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 990.474993] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 990.475164] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 990.475347] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 990.475496] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 990.475644] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 990.475854] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 990.476024] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 990.476199] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 990.476366] env[68798]: DEBUG nova.virt.hardware [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 990.476538] env[68798]: DEBUG nova.virt.hardware [None 
req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.477944] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f36fda4-9701-405f-a873-6f610345130d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.486444] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ed061a-9db5-4e90-92e7-4fb1cf5373d0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.212160] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Successfully created port: 1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 992.562447] env[68798]: DEBUG nova.compute.manager [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Received event network-vif-plugged-1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 992.562732] env[68798]: DEBUG oslo_concurrency.lockutils [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] Acquiring lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.562889] env[68798]: DEBUG oslo_concurrency.lockutils [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.563064] env[68798]: DEBUG oslo_concurrency.lockutils [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.563228] env[68798]: DEBUG nova.compute.manager [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] No waiting events found dispatching network-vif-plugged-1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 992.563387] env[68798]: WARNING nova.compute.manager [req-5d5c42b0-6feb-40ef-82db-610691065c04 req-99804bb5-200c-4e16-916d-7cb005afbc4b service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Received unexpected event network-vif-plugged-1986588a-8b12-4168-8d8c-1a5df5498951 for instance with vm_state building and task_state spawning. 
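The nova.virt.hardware entries above walk from the flavor and image limits (0:0:0, i.e. unconstrained) to a single possible topology for 1 vCPU, VirtCPUTopology(cores=1,sockets=1,threads=1). The sketch below is a simplified illustration of that enumeration step, not nova.virt.hardware's actual algorithm: it lists every (sockets, cores, threads) triple whose product equals the vCPU count and which stays within the logged maxima of 65536 per dimension.

# Simplified illustration of CPU-topology enumeration (not Nova code).
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    candidates = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            candidates.append(VirtCPUTopology(sockets, cores, threads))
    return candidates

# For the m1.nano flavor above (vcpus=1) this yields exactly one candidate,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))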
[ 992.636448] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Successfully updated port: 1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.648374] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.648561] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquired lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.648738] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 992.722667] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 992.973018] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Updating instance_info_cache with network_info: [{"id": "1986588a-8b12-4168-8d8c-1a5df5498951", "address": "fa:16:3e:a9:95:9d", "network": {"id": "d410409b-eb15-49cc-8c45-b05dab4d39c1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-884415824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4827ca551c984f14bad7f5dbf5f6d1c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1986588a-8b", "ovs_interfaceid": "1986588a-8b12-4168-8d8c-1a5df5498951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.994243] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Releasing lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.994554] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance network_info: |[{"id": "1986588a-8b12-4168-8d8c-1a5df5498951", "address": "fa:16:3e:a9:95:9d", "network": {"id": "d410409b-eb15-49cc-8c45-b05dab4d39c1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-884415824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4827ca551c984f14bad7f5dbf5f6d1c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1986588a-8b", "ovs_interfaceid": "1986588a-8b12-4168-8d8c-1a5df5498951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 992.995017] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:95:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f6d1427-d86b-4371-9172-50e4bb0eb1cb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1986588a-8b12-4168-8d8c-1a5df5498951', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.003052] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Creating folder: Project (4827ca551c984f14bad7f5dbf5f6d1c4). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 993.003647] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c99117aa-3c18-4d4b-84c7-df811f5d8bc1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.014815] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Created folder: Project (4827ca551c984f14bad7f5dbf5f6d1c4) in parent group-v834492. [ 993.015013] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Creating folder: Instances. Parent ref: group-v834544. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 993.015303] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b414f9d0-fe13-4278-a403-096757c7db71 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.026396] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Created folder: Instances in parent group-v834544. [ 993.027027] env[68798]: DEBUG oslo.service.loopingcall [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.027027] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 993.027194] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-592b7396-f0de-41f2-a39c-a23581441465 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.048170] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.048170] env[68798]: value = "task-4217591" [ 993.048170] env[68798]: _type = "Task" [ 993.048170] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.057282] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217591, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.558950] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217591, 'name': CreateVM_Task, 'duration_secs': 0.337233} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.559461] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 993.560326] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.560654] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.561126] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.561511] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48ae777c-128e-452c-b524-db5e694c7d13 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.566839] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for the task: (returnval){ [ 993.566839] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52436940-c4a1-de3d-4775-b58ef622062b" [ 993.566839] env[68798]: _type = "Task" [ 993.566839] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.576229] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52436940-c4a1-de3d-4775-b58ef622062b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.077764] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.078060] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.078288] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.593580] env[68798]: DEBUG nova.compute.manager [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Received event network-changed-1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 994.593580] env[68798]: DEBUG nova.compute.manager [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Refreshing instance network info cache due to event network-changed-1986588a-8b12-4168-8d8c-1a5df5498951. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 994.593580] env[68798]: DEBUG oslo_concurrency.lockutils [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] Acquiring lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.593580] env[68798]: DEBUG oslo_concurrency.lockutils [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] Acquired lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.593580] env[68798]: DEBUG nova.network.neutron [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Refreshing network info cache for port 1986588a-8b12-4168-8d8c-1a5df5498951 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 994.957209] env[68798]: DEBUG nova.network.neutron [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Updated VIF entry in instance network info cache for port 1986588a-8b12-4168-8d8c-1a5df5498951. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.957642] env[68798]: DEBUG nova.network.neutron [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Updating instance_info_cache with network_info: [{"id": "1986588a-8b12-4168-8d8c-1a5df5498951", "address": "fa:16:3e:a9:95:9d", "network": {"id": "d410409b-eb15-49cc-8c45-b05dab4d39c1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-884415824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4827ca551c984f14bad7f5dbf5f6d1c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f6d1427-d86b-4371-9172-50e4bb0eb1cb", "external-id": "nsx-vlan-transportzone-979", "segmentation_id": 979, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1986588a-8b", "ovs_interfaceid": "1986588a-8b12-4168-8d8c-1a5df5498951", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.971300] env[68798]: DEBUG oslo_concurrency.lockutils [req-174d9e93-49c8-46f0-99d1-eb32cf72adac req-34219f94-235e-4100-bf92-22574f686875 service nova] Releasing lock "refresh_cache-30e8027d-98b3-4a5f-9eb4-244846cb90e2" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.051886] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "5e53196f-984a-4d72-8e00-861ef0751dca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.053615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.554122] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a572fa-5613-4057-912e-ecb40322e7dd tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "02e3ae68-7367-45db-9a2f-01a2e9f703ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.554297] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a572fa-5613-4057-912e-ecb40322e7dd tempest-ListServerFiltersTestJSON-698649794 
tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "02e3ae68-7367-45db-9a2f-01a2e9f703ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.127605] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca7f9759-8978-49da-8895-4e335d31fb87 tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "3a185352-1cdb-4aa1-b163-abc6e712690e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.127934] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca7f9759-8978-49da-8895-4e335d31fb87 tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "3a185352-1cdb-4aa1-b163-abc6e712690e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.954747] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.290966] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a16663-71c3-4675-bcdf-4321317db602 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "be335186-1418-480e-a213-dbe877aa1488" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.291890] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a16663-71c3-4675-bcdf-4321317db602 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "be335186-1418-480e-a213-dbe877aa1488" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.641752] env[68798]: WARNING oslo_vmware.rw_handles [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1037.641752] env[68798]: 
ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1037.641752] env[68798]: ERROR oslo_vmware.rw_handles [ 1037.642851] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1037.643859] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1037.644563] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Copying Virtual Disk [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/b778d46a-654f-4b2a-be7a-afcd84166330/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1037.644867] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42b70c78-d00f-4627-befe-c45b63d328bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.654932] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for the task: (returnval){ [ 1037.654932] env[68798]: value = "task-4217592" [ 1037.654932] env[68798]: _type = "Task" [ 1037.654932] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.665989] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Task: {'id': task-4217592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.169020] env[68798]: DEBUG oslo_vmware.exceptions [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1038.169322] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.169992] env[68798]: ERROR nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.169992] env[68798]: Faults: ['InvalidArgument'] [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Traceback (most recent call last): [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] yield resources [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self.driver.spawn(context, instance, image_meta, [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self._fetch_image_if_missing(context, vi) [ 1038.169992] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] image_cache(vi, tmp_image_ds_loc) [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] vm_util.copy_virtual_disk( [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] session._wait_for_task(vmdk_copy_task) [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return self.wait_for_task(task_ref) [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return evt.wait() [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] result = hub.switch() [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1038.170407] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return self.greenlet.switch() [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self.f(*self.args, **self.kw) [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] raise exceptions.translate_fault(task_info.error) [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Faults: ['InvalidArgument'] [ 1038.170714] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] [ 1038.170714] env[68798]: INFO nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Terminating instance [ 1038.173753] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.173753] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.174336] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 
tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1038.174627] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1038.175211] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae1e5fe5-b8f8-4cdf-8a96-d853a5b1a844 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.179057] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb98b7b-b796-4e5a-9b38-fb11869781de {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.190852] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1038.191386] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-255d9faa-afd7-4df0-8fb6-ae902eaf66aa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.195495] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.195726] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1038.197162] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3409cbd9-8d94-45f9-aab1-a20b467d8593 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.204046] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for the task: (returnval){ [ 1038.204046] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b74e1c-e2c1-f582-5962-eb5b2b5509d7" [ 1038.204046] env[68798]: _type = "Task" [ 1038.204046] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.213685] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b74e1c-e2c1-f582-5962-eb5b2b5509d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.280970] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1038.280970] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1038.281437] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Deleting the datastore file [datastore1] e060aaea-7508-46ed-8786-b5753fde75e9 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1038.281812] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e70d3b07-6c3c-4ab1-a037-085b30b6c9a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.290852] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for the task: (returnval){ [ 1038.290852] env[68798]: value = "task-4217594" [ 1038.290852] env[68798]: _type = "Task" [ 1038.290852] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.306032] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Task: {'id': task-4217594, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.719959] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1038.721550] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Creating directory with path [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.721835] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47f977dd-f956-40d8-ab99-224443bd2a67 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.738164] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Created directory with path [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.738164] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Fetch image to [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1038.738164] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1038.738164] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce960d4d-b1af-423a-9fe5-39f2cd481d29 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.747598] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abc3c3b-0cbe-4281-804d-3ce527b25b39 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.764276] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc110d20-83ef-4b43-93e6-e3357f300983 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.805023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa2e209-05a5-467e-91af-52527562f741 {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.818020] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2c18fe0f-081f-47d0-9909-a64a0f338015 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.818020] env[68798]: DEBUG oslo_vmware.api [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Task: {'id': task-4217594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079136} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.818020] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.818020] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1038.818020] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1038.818302] env[68798]: INFO nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Took 0.64 seconds to destroy the instance on the hypervisor. 
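The failure recorded above surfaces as oslo_vmware.exceptions.VimFaultException with "A specified parameter was not correct: fileType" and Faults: ['InvalidArgument'], after which the compute manager destroys the half-built VM and aborts its resource claim. The snippet below is a hedged sketch of catching that exception type and inspecting its fault list; the constructor arguments are assumed from the logged message, and the stub stands in for waiting on the real CopyVirtualDisk_Task rather than calling a vCenter.

# Hedged sketch: reacting to a VimFaultException like the one logged above.
from oslo_vmware import exceptions as vexc

def copy_disk_task_stub():
    # Stand-in for waiting on CopyVirtualDisk_Task; raises the same fault the
    # log records so the handler can be exercised without a vCenter.
    raise vexc.VimFaultException(
        ['InvalidArgument'],
        'A specified parameter was not correct: fileType')

try:
    copy_disk_task_stub()
except vexc.VimFaultException as exc:
    if 'InvalidArgument' in exc.fault_list:
        # Mirrors the cleanup path in the log: treat the spawn as failed and
        # tear the instance back down.
        print('copy failed, cleaning up:', exc)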
[ 1038.820513] env[68798]: DEBUG nova.compute.claims [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1038.820513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.820513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.842592] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1038.908562] env[68798]: DEBUG oslo_vmware.rw_handles [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1038.977535] env[68798]: DEBUG oslo_vmware.rw_handles [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1038.979083] env[68798]: DEBUG oslo_vmware.rw_handles [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1039.434351] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37d34ee-26b1-45cc-8289-02d7a114120e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.441629] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75a179a-d1b6-439b-833b-43027881125c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.478315] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218d2bb7-9dc0-4c8d-9bbe-b200ecd95739 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.486691] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717db94c-1564-4877-9d32-9b91f7f4fc4d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.500830] env[68798]: DEBUG nova.compute.provider_tree [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.511328] env[68798]: DEBUG nova.scheduler.client.report [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1039.530269] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.710s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.530871] env[68798]: ERROR nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.530871] env[68798]: Faults: ['InvalidArgument'] [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Traceback (most recent call last): [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self.driver.spawn(context, instance, image_meta, [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self._fetch_image_if_missing(context, vi) [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] image_cache(vi, tmp_image_ds_loc) [ 1039.530871] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] vm_util.copy_virtual_disk( [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] session._wait_for_task(vmdk_copy_task) [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return self.wait_for_task(task_ref) [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return evt.wait() [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] result = hub.switch() [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] return self.greenlet.switch() [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1039.531309] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] self.f(*self.args, **self.kw) [ 1039.531592] env[68798]: ERROR nova.compute.manager [instance: 
e060aaea-7508-46ed-8786-b5753fde75e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1039.531592] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] raise exceptions.translate_fault(task_info.error) [ 1039.531592] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.531592] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Faults: ['InvalidArgument'] [ 1039.531592] env[68798]: ERROR nova.compute.manager [instance: e060aaea-7508-46ed-8786-b5753fde75e9] [ 1039.531706] env[68798]: DEBUG nova.compute.utils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1039.533788] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Build of instance e060aaea-7508-46ed-8786-b5753fde75e9 was re-scheduled: A specified parameter was not correct: fileType [ 1039.533788] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1039.534181] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1039.534361] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1039.534514] env[68798]: DEBUG nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1039.534674] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1039.743859] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.744225] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.796934] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.048914] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1040.408627] env[68798]: DEBUG nova.network.neutron [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.423734] env[68798]: INFO nova.compute.manager [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Took 0.89 seconds to deallocate network for instance. 
[ 1040.577018] env[68798]: INFO nova.scheduler.client.report [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Deleted allocations for instance e060aaea-7508-46ed-8786-b5753fde75e9 [ 1040.611028] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6d20e0c6-da0a-44eb-ad35-1ffaad053846 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "e060aaea-7508-46ed-8786-b5753fde75e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 413.042s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.612447] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "e060aaea-7508-46ed-8786-b5753fde75e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 214.452s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.612663] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Acquiring lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.612720] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.612883] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "e060aaea-7508-46ed-8786-b5753fde75e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.616759] env[68798]: INFO nova.compute.manager [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Terminating instance [ 1040.619914] env[68798]: DEBUG nova.compute.manager [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1040.619914] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1040.619914] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6767c421-2425-4df3-a1db-21afc9b41fa5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.627528] env[68798]: DEBUG nova.compute.manager [None req-822d4ae7-c419-400f-a7c2-3e2307d910da tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 5a4174a6-bf87-4107-8382-8c0f90253d45] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.639311] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54593bbb-723e-4d17-9f6c-0cc3b468ae4f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.674589] env[68798]: DEBUG nova.compute.manager [None req-822d4ae7-c419-400f-a7c2-3e2307d910da tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 5a4174a6-bf87-4107-8382-8c0f90253d45] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1040.676731] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e060aaea-7508-46ed-8786-b5753fde75e9 could not be found. [ 1040.676977] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1040.677195] env[68798]: INFO nova.compute.manager [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1040.677715] env[68798]: DEBUG oslo.service.loopingcall [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1040.678105] env[68798]: DEBUG nova.compute.manager [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1040.678233] env[68798]: DEBUG nova.network.neutron [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1040.709303] env[68798]: DEBUG oslo_concurrency.lockutils [None req-822d4ae7-c419-400f-a7c2-3e2307d910da tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "5a4174a6-bf87-4107-8382-8c0f90253d45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.459s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.725030] env[68798]: DEBUG nova.network.neutron [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.741601] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1040.748837] env[68798]: INFO nova.compute.manager [-] [instance: e060aaea-7508-46ed-8786-b5753fde75e9] Took 0.07 seconds to deallocate network for instance. 
[ 1040.816608] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.816608] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.818097] env[68798]: INFO nova.compute.claims [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.890901] env[68798]: DEBUG oslo_concurrency.lockutils [None req-51c25592-89e8-4067-b26e-fefcdd87df38 tempest-ServersAdminNegativeTestJSON-1725783780 tempest-ServersAdminNegativeTestJSON-1725783780-project-member] Lock "e060aaea-7508-46ed-8786-b5753fde75e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.279s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.929993] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0420805a-e4d1-461e-ae95-bed495d2f593 tempest-ImagesOneServerNegativeTestJSON-1064251384 tempest-ImagesOneServerNegativeTestJSON-1064251384-project-member] Acquiring lock "6669c663-e5ca-4257-b7aa-f694b12f91d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.930236] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0420805a-e4d1-461e-ae95-bed495d2f593 tempest-ImagesOneServerNegativeTestJSON-1064251384 tempest-ImagesOneServerNegativeTestJSON-1064251384-project-member] Lock "6669c663-e5ca-4257-b7aa-f694b12f91d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.044503] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.048849] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.049054] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1041.049193] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] 
Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1041.078473] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.078473] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.078614] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.078659] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.078811] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.078908] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.079032] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.079171] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.079290] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.079540] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1041.079540] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1041.083316] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.083316] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.372919] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05091fde-2932-4fe1-aedf-a5a3f9fde7c0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.382907] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea847ee2-8df3-4050-91d2-af7624f00b45 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.419495] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622820a7-74a8-48d8-9ecc-53ffd8e5ced0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.430588] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122547d9-efd6-45a3-9ac4-1dde66047b10 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.447337] env[68798]: DEBUG nova.compute.provider_tree [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.460425] env[68798]: DEBUG nova.scheduler.client.report [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.478869] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.662s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.479407] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: 
a7060037-2580-464a-b434-90ffe7314bd1] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1041.525774] env[68798]: DEBUG nova.compute.utils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1041.527433] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1041.527543] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1041.538595] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1041.602913] env[68798]: DEBUG nova.policy [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed1228668a7340acb4839090c97d4915', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '897bb38c932f48fdb39e4972cb8994a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1041.614794] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1041.651297] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1041.651551] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1041.651745] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1041.651967] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1041.652230] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1041.652469] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1041.652687] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1041.652895] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1041.653115] env[68798]: DEBUG nova.virt.hardware [None 
req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1041.653321] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1041.653534] env[68798]: DEBUG nova.virt.hardware [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1041.654808] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c70666b-cbeb-4297-ab2e-457d7465e191 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.664594] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd47b60-0d86-4efe-a42d-b86b241e7b8d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.030333] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Successfully created port: 9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.049081] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.513934] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Successfully created port: 5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.442381] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Successfully updated port: 9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.498142] env[68798]: DEBUG nova.compute.manager [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received event network-vif-plugged-9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1043.498447] env[68798]: DEBUG oslo_concurrency.lockutils [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.498634] env[68798]: DEBUG oslo_concurrency.lockutils [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.498800] env[68798]: DEBUG oslo_concurrency.lockutils [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.498968] env[68798]: DEBUG nova.compute.manager [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] No waiting events found dispatching network-vif-plugged-9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1043.499297] env[68798]: WARNING nova.compute.manager [req-c3cbfd07-d02d-40b6-aca7-c5a439118346 req-aadb05ab-4d15-4479-9394-e08e840cb5e8 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received unexpected event network-vif-plugged-9e8c895f-1659-4135-813a-247b63794212 for instance with vm_state building and task_state spawning. [ 1044.045885] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.079778] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1044.079778] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1044.731878] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Successfully updated port: 5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.748648] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.748790] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquired lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.748938] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1044.768267] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.812947] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1045.372274] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [{"id": "9e8c895f-1659-4135-813a-247b63794212", "address": "fa:16:3e:21:80:63", "network": {"id": "d53c5ff5-9ed4-4e0b-bd98-9ee6f7d36d8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-686976316", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8c895f-16", "ovs_interfaceid": "9e8c895f-1659-4135-813a-247b63794212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "address": "fa:16:3e:ed:f7:27", "network": {"id": "b7d30f94-8a29-4935-8b29-d01301baceff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-50618664", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f66f8375-4460-4acd-987b-acda72bfcf0d", "external-id": "nsx-vlan-transportzone-533", "segmentation_id": 533, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5346e211-53", "ovs_interfaceid": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.393694] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Releasing lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1045.394070] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance network_info: |[{"id": "9e8c895f-1659-4135-813a-247b63794212", "address": "fa:16:3e:21:80:63", "network": {"id": 
"d53c5ff5-9ed4-4e0b-bd98-9ee6f7d36d8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-686976316", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8c895f-16", "ovs_interfaceid": "9e8c895f-1659-4135-813a-247b63794212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "address": "fa:16:3e:ed:f7:27", "network": {"id": "b7d30f94-8a29-4935-8b29-d01301baceff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-50618664", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f66f8375-4460-4acd-987b-acda72bfcf0d", "external-id": "nsx-vlan-transportzone-533", "segmentation_id": 533, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5346e211-53", "ovs_interfaceid": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.394516] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:80:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '305ccd93-08cb-4658-845c-d9b64952daf7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e8c895f-1659-4135-813a-247b63794212', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:f7:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f66f8375-4460-4acd-987b-acda72bfcf0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5346e211-53a2-41e4-8a70-3c9518cc8cb1', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.405301] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Creating folder: Project (897bb38c932f48fdb39e4972cb8994a3). 
Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.405301] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eec07d5b-3e91-4776-8bf0-453ac2f53c94 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.415851] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Created folder: Project (897bb38c932f48fdb39e4972cb8994a3) in parent group-v834492. [ 1045.416080] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Creating folder: Instances. Parent ref: group-v834547. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.416344] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-289f72c8-ce11-4ccd-99af-8a702e0a7693 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.426099] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Created folder: Instances in parent group-v834547. [ 1045.426386] env[68798]: DEBUG oslo.service.loopingcall [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.426588] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1045.426791] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f03635c3-9dfb-459b-b319-32d43184f107 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.449033] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.449033] env[68798]: value = "task-4217597" [ 1045.449033] env[68798]: _type = "Task" [ 1045.449033] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.458844] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217597, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.538220] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received event network-changed-9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1045.538220] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Refreshing instance network info cache due to event network-changed-9e8c895f-1659-4135-813a-247b63794212. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1045.538220] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Acquiring lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.538220] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Acquired lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.545101] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Refreshing network info cache for port 9e8c895f-1659-4135-813a-247b63794212 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1045.962199] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217597, 'name': CreateVM_Task, 'duration_secs': 0.379454} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.962460] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1045.963422] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.963665] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.964138] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1045.964517] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c347d4e-79a0-48be-b89a-fe291436ab79 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.971118] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for the task: (returnval){ [ 1045.971118] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52ffd1aa-5278-eaa5-927e-f542ef76c218" [ 1045.971118] env[68798]: _type = "Task" [ 1045.971118] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.981378] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52ffd1aa-5278-eaa5-927e-f542ef76c218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.048666] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.069777] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.070086] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.071477] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.071477] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1046.073700] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4563769-55b8-4423-a05d-57d5a55c8649 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.084507] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768000fd-69b8-4065-99ce-1628155113ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.104026] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b4e34d-6ebe-4f64-8d28-2bfd894cd528 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.111488] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589710f5-4876-4734-abc3-cbc6f7e1a40b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.154144] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180769MB free_disk=3GB free_vcpus=48 
pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1046.154144] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.154395] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.161054] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updated VIF entry in instance network info cache for port 9e8c895f-1659-4135-813a-247b63794212. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.161054] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [{"id": "9e8c895f-1659-4135-813a-247b63794212", "address": "fa:16:3e:21:80:63", "network": {"id": "d53c5ff5-9ed4-4e0b-bd98-9ee6f7d36d8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-686976316", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8c895f-16", "ovs_interfaceid": "9e8c895f-1659-4135-813a-247b63794212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "address": "fa:16:3e:ed:f7:27", "network": {"id": "b7d30f94-8a29-4935-8b29-d01301baceff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-50618664", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f66f8375-4460-4acd-987b-acda72bfcf0d", "external-id": "nsx-vlan-transportzone-533", "segmentation_id": 533, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap5346e211-53", "ovs_interfaceid": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.175520] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Releasing lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.175790] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received event network-vif-plugged-5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1046.175981] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.176206] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.176418] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.176733] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] No waiting events found dispatching network-vif-plugged-5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1046.176924] env[68798]: WARNING nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received unexpected event network-vif-plugged-5346e211-53a2-41e4-8a70-3c9518cc8cb1 for instance with vm_state building and task_state deleting. 
[ 1046.177104] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Received event network-changed-5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1046.177291] env[68798]: DEBUG nova.compute.manager [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Refreshing instance network info cache due to event network-changed-5346e211-53a2-41e4-8a70-3c9518cc8cb1. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1046.177494] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Acquiring lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.177633] env[68798]: DEBUG oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Acquired lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.177858] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Refreshing network info cache for port 5346e211-53a2-41e4-8a70-3c9518cc8cb1 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.262550] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.262744] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263106] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263106] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263216] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263244] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263360] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263585] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.263772] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.264045] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1046.278488] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 40be8e0f-88ab-43bc-9923-5e9e478c1cdf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.293837] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e22852fa-7480-4761-8cd2-1371d6cb1410 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.309979] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca8f61e2-513d-48a0-aebd-18507eccd99d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.328029] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dd8c777d-d724-4f7c-9516-448c4b2abb5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.347200] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c9185203-eefd-455a-ba91-ec9797db792e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.360417] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.384914] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 28ba427d-0034-41e3-b474-eab0eb3c794e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.402469] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 63902532-8a21-4dbe-8315-ef6c45f88859 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.416907] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.431228] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.447644] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fd8e2ed6-f349-48a5-88db-54dd535587cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.461499] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.476950] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.485940] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.486158] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.486379] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.494943] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 02e3ae68-7367-45db-9a2f-01a2e9f703ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.515305] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3a185352-1cdb-4aa1-b163-abc6e712690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.530416] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be335186-1418-480e-a213-dbe877aa1488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.550257] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.568185] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6669c663-e5ca-4257-b7aa-f694b12f91d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.568185] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.568185] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1047.012406] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa08d64-2a43-4f93-a0e7-20133a4d1d41 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.021394] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321fed29-217a-4ce1-a1ba-c8cbd88b258a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.055979] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7695b40f-b0ef-407b-8056-5b117c2c180f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.064913] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd37d21-6063-451f-9894-e94aad5a243f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.081655] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.093878] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1047.113893] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1047.114127] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.960s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.246773] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updated VIF entry in instance network info cache for port 5346e211-53a2-41e4-8a70-3c9518cc8cb1. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1047.247238] env[68798]: DEBUG nova.network.neutron [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [{"id": "9e8c895f-1659-4135-813a-247b63794212", "address": "fa:16:3e:21:80:63", "network": {"id": "d53c5ff5-9ed4-4e0b-bd98-9ee6f7d36d8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-686976316", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "305ccd93-08cb-4658-845c-d9b64952daf7", "external-id": "nsx-vlan-transportzone-490", "segmentation_id": 490, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e8c895f-16", "ovs_interfaceid": "9e8c895f-1659-4135-813a-247b63794212", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "address": "fa:16:3e:ed:f7:27", "network": {"id": "b7d30f94-8a29-4935-8b29-d01301baceff", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-50618664", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "897bb38c932f48fdb39e4972cb8994a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f66f8375-4460-4acd-987b-acda72bfcf0d", "external-id": "nsx-vlan-transportzone-533", "segmentation_id": 533, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5346e211-53", "ovs_interfaceid": "5346e211-53a2-41e4-8a70-3c9518cc8cb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.265717] env[68798]: DEBUG 
oslo_concurrency.lockutils [req-65164f9a-6513-464c-9f5a-f19a185d8621 req-67dff84f-b053-4c5b-a4d8-dfe6a1968d15 service nova] Releasing lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.656169] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd15a999-fe5f-4609-8f60-18101b2be48e tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Acquiring lock "addf38a1-9a3f-4e4f-ae0a-011fa96b344a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.656791] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd15a999-fe5f-4609-8f60-18101b2be48e tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Lock "addf38a1-9a3f-4e4f-ae0a-011fa96b344a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.836526] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93b54225-d1e4-4e32-a08d-1cc1974965f4 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Acquiring lock "be9d913d-aeb6-4ae9-baca-d1733e9e5734" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.836816] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93b54225-d1e4-4e32-a08d-1cc1974965f4 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "be9d913d-aeb6-4ae9-baca-d1733e9e5734" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1060.402514] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4a2e845b-7987-4555-af95-45b421417b0a tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "78d16017-fa1f-4d77-9111-55d37a1463d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.402839] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4a2e845b-7987-4555-af95-45b421417b0a tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "78d16017-fa1f-4d77-9111-55d37a1463d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.953686] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8daa0e20-cebc-4e22-84cf-b29ac98ed8db tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "8f21bf87-c969-40dc-bbdc-9b9c0302b3ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.953686] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8daa0e20-cebc-4e22-84cf-b29ac98ed8db tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "8f21bf87-c969-40dc-bbdc-9b9c0302b3ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1065.972565] env[68798]: DEBUG oslo_concurrency.lockutils [None req-92993540-b01d-4bad-a7fc-8bc797bb49d6 tempest-ServerMetadataTestJSON-1279570147 tempest-ServerMetadataTestJSON-1279570147-project-member] Acquiring lock "6d29d382-0b65-4cce-a487-1e4096cb4907" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1065.972843] env[68798]: DEBUG oslo_concurrency.lockutils [None req-92993540-b01d-4bad-a7fc-8bc797bb49d6 tempest-ServerMetadataTestJSON-1279570147 tempest-ServerMetadataTestJSON-1279570147-project-member] Lock "6d29d382-0b65-4cce-a487-1e4096cb4907" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.349805] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0be1bef0-aa28-4213-a1fc-f04876a1f07c tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Acquiring lock "1a5e8081-1a86-4c91-8139-469b7825fc47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.350162] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0be1bef0-aa28-4213-a1fc-f04876a1f07c tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "1a5e8081-1a86-4c91-8139-469b7825fc47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.207203] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5f0e11eb-4ce2-48f2-ad45-55026832b1c4 tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Acquiring lock "66a828dc-4a00-49d8-944a-0c8a90d56219" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.207446] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5f0e11eb-4ce2-48f2-ad45-55026832b1c4 tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Lock "66a828dc-4a00-49d8-944a-0c8a90d56219" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.226297] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce 
tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Acquiring lock "a4376aa4-f675-42e8-a908-a398ab8db455" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.226605] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "a4376aa4-f675-42e8-a908-a398ab8db455" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.255423] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Acquiring lock "dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.256810] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1086.767888] env[68798]: WARNING oslo_vmware.rw_handles [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1086.767888] env[68798]: ERROR oslo_vmware.rw_handles [ 1086.768658] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] 
Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1086.770974] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1086.771307] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Copying Virtual Disk [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/e506d716-6b02-481d-86c0-c41801d3d208/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1086.771611] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd11854d-8c14-481f-b667-339fd96e4734 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.780415] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for the task: (returnval){ [ 1086.780415] env[68798]: value = "task-4217598" [ 1086.780415] env[68798]: _type = "Task" [ 1086.780415] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.792043] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Task: {'id': task-4217598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.291529] env[68798]: DEBUG oslo_vmware.exceptions [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1087.291829] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.292419] env[68798]: ERROR nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.292419] env[68798]: Faults: ['InvalidArgument'] [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Traceback (most recent call last): [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] yield resources [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self.driver.spawn(context, instance, image_meta, [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self._fetch_image_if_missing(context, vi) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] image_cache(vi, tmp_image_ds_loc) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] vm_util.copy_virtual_disk( [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] session._wait_for_task(vmdk_copy_task) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return self.wait_for_task(task_ref) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return evt.wait() [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] result = hub.switch() [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return self.greenlet.switch() [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self.f(*self.args, **self.kw) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] raise exceptions.translate_fault(task_info.error) [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Faults: ['InvalidArgument'] [ 1087.292419] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] [ 1087.293579] env[68798]: INFO nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Terminating instance [ 1087.294358] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.294566] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.295200] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] 
[instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1087.295387] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1087.295613] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0efe2c1c-6dad-46ca-aee5-cf82b929ec6e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.298244] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fd37bd-4e29-446c-82ef-7141f67148ac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.305018] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1087.305238] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8118ef9-5f9b-4707-ab79-9cfc36a4aea3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.307447] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.307617] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1087.308611] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a8c9cb-995f-4a1a-933e-1eb67891e2a3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.313550] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for the task: (returnval){ [ 1087.313550] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]525a4bc0-1f72-c266-6a33-393ef63c9519" [ 1087.313550] env[68798]: _type = "Task" [ 1087.313550] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.323146] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]525a4bc0-1f72-c266-6a33-393ef63c9519, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.375743] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1087.376037] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1087.376189] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Deleting the datastore file [datastore1] 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.376482] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f958e2d-4a28-4a01-b5f2-6fae43f196ef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.383749] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for the task: (returnval){ [ 1087.383749] env[68798]: value = "task-4217600" [ 1087.383749] env[68798]: _type = "Task" [ 1087.383749] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.392201] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Task: {'id': task-4217600, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.828035] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1087.828035] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Creating directory with path [datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.828035] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f976246e-fca7-4a35-a71a-cc5a45397215 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.841242] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Created directory with path [datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.841242] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Fetch image to [datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1087.841242] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1087.841801] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530b0fbe-f5f0-4c14-bb46-c861b38cb892 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.850275] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e4b49b-9081-446c-b8b4-92836e9e223a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.859970] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f89ae6-b3ae-4330-baae-fb13adc7cea5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.894242] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-531b71c9-7b3f-48bc-9377-e68942586f23 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.902401] env[68798]: DEBUG oslo_vmware.api [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Task: {'id': task-4217600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077326} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.903971] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.904183] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1087.904361] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1087.904569] env[68798]: INFO nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Took 0.61 seconds to destroy the instance on the hypervisor. 
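The entries above follow oslo.vmware's standard invoke-then-poll pattern: the driver submits an asynchronous VIM task (FileManager.DeleteDatastoreFile_Task here) and wait_for_task polls it, which is what produces the "progress is 0%" and "completed successfully" lines. Below is a minimal sketch of that pattern, not Nova's own helper; the endpoint, credentials and datastore path are placeholders, and dc_ref stands for the Datacenter managed-object reference a real caller would pass.

from oslo_vmware import api


def delete_datastore_file(session, ds_path, dc_ref):
    """Submit DeleteDatastoreFile_Task and block until vCenter finishes it."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    # wait_for_task polls the task (the "progress is 0%" lines) and raises a
    # translated VIM exception if the task ends in error.
    session.wait_for_task(task)


# Placeholder endpoint and credentials; creating the session logs in, so a
# reachable vCenter is needed for this to succeed.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
# dc_ref would be the Datacenter moref owning datastore1; with it in hand:
# delete_datastore_file(session, '[datastore1] <instance-uuid>', dc_ref)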
[ 1087.906439] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-63113448-a156-4e77-b244-04a3c38af40a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.908628] env[68798]: DEBUG nova.compute.claims [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1087.908807] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1087.909038] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1087.936392] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1087.989474] env[68798]: DEBUG oslo_vmware.rw_handles [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1088.052186] env[68798]: DEBUG oslo_vmware.rw_handles [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1088.052386] env[68798]: DEBUG oslo_vmware.rw_handles [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1088.348430] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98df25f9-b52d-46f8-bf31-e5731fe5b7f5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.356795] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9da2e9-4a44-4352-b964-0ebe3a1f0554 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.387522] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4316e121-60c1-406a-b810-7e812c48101a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.395647] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c818d7b-f395-4dc8-924b-2b6de805234f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.409708] env[68798]: DEBUG nova.compute.provider_tree [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.420706] env[68798]: DEBUG nova.scheduler.client.report [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1088.437949] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.529s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.438499] env[68798]: ERROR nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1088.438499] env[68798]: Faults: ['InvalidArgument'] [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Traceback (most recent call last): [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1088.438499] env[68798]: ERROR 
nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self.driver.spawn(context, instance, image_meta, [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self._fetch_image_if_missing(context, vi) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] image_cache(vi, tmp_image_ds_loc) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] vm_util.copy_virtual_disk( [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] session._wait_for_task(vmdk_copy_task) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return self.wait_for_task(task_ref) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return evt.wait() [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] result = hub.switch() [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] return self.greenlet.switch() [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] self.f(*self.args, **self.kw) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] raise exceptions.translate_fault(task_info.error) [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Faults: ['InvalidArgument'] [ 1088.438499] env[68798]: ERROR nova.compute.manager [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] [ 1088.439299] env[68798]: DEBUG nova.compute.utils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1088.440882] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Build of instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 was re-scheduled: A specified parameter was not correct: fileType [ 1088.440882] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1088.441227] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1088.441401] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1088.441554] env[68798]: DEBUG nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1088.441720] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.739031] env[68798]: DEBUG nova.network.neutron [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.754055] env[68798]: INFO nova.compute.manager [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Took 0.31 seconds to deallocate network for instance. [ 1088.870455] env[68798]: INFO nova.scheduler.client.report [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Deleted allocations for instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 [ 1088.892895] env[68798]: DEBUG oslo_concurrency.lockutils [None req-eaab17a7-054e-4bd4-aaa5-5cc6d5b6eb7a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 457.120s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.894158] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 256.368s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.894383] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Acquiring lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.894600] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.894747] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.899632] env[68798]: INFO nova.compute.manager [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Terminating instance [ 1088.904606] env[68798]: DEBUG nova.compute.manager [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1088.904874] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1088.905443] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6eb2d481-5b74-4bfc-9d33-19c7c40a2dad {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.908514] env[68798]: DEBUG nova.compute.manager [None req-6ff0bab0-960d-4654-8425-e8e890bd4749 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 40be8e0f-88ab-43bc-9923-5e9e478c1cdf] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1088.919032] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0459d5a2-c580-451c-b52a-284e709cdfe2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.938080] env[68798]: DEBUG nova.compute.manager [None req-6ff0bab0-960d-4654-8425-e8e890bd4749 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] [instance: 40be8e0f-88ab-43bc-9923-5e9e478c1cdf] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1088.952048] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ecf18e5-a4a1-4efb-b54a-964b064b51e5 could not be found. 
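The "Acquiring lock ... / acquired ... waited / released ... held" lines around the terminate come from oslo.concurrency's lockutils wrapper (lockutils.py:402/407/421), which Nova uses to serialize work per instance UUID and per "<uuid>-events" key. A minimal sketch of that primitive follows; the function bodies are placeholders, and only the lock names echo the log.

from oslo_concurrency import lockutils

INSTANCE_UUID = '1ecf18e5-a4a1-4efb-b54a-964b064b51e5'


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Only one caller at a time may run per-instance teardown; the decorator's
    # inner wrapper emits the "acquired ... waited" / "released ... held" lines.
    pass


# The same primitive is available as a context manager, used here for the
# per-instance event lock seen above:
with lockutils.lock(INSTANCE_UUID + '-events'):
    pass  # e.g. clear pending external events for the instance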
[ 1088.952202] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1088.952368] env[68798]: INFO nova.compute.manager [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1088.952631] env[68798]: DEBUG oslo.service.loopingcall [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.952874] env[68798]: DEBUG nova.compute.manager [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1088.952973] env[68798]: DEBUG nova.network.neutron [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.978059] env[68798]: DEBUG nova.network.neutron [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1088.987523] env[68798]: INFO nova.compute.manager [-] [instance: 1ecf18e5-a4a1-4efb-b54a-964b064b51e5] Took 0.03 seconds to deallocate network for instance. [ 1088.999762] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6ff0bab0-960d-4654-8425-e8e890bd4749 tempest-MigrationsAdminTest-1554767835 tempest-MigrationsAdminTest-1554767835-project-member] Lock "40be8e0f-88ab-43bc-9923-5e9e478c1cdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.268s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.009889] env[68798]: DEBUG nova.compute.manager [None req-e602cc45-9637-4728-a8d8-a271de573a73 tempest-ServersAaction247Test-1501939504 tempest-ServersAaction247Test-1501939504-project-member] [instance: e22852fa-7480-4761-8cd2-1371d6cb1410] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.042360] env[68798]: DEBUG nova.compute.manager [None req-e602cc45-9637-4728-a8d8-a271de573a73 tempest-ServersAaction247Test-1501939504 tempest-ServersAaction247Test-1501939504-project-member] [instance: e22852fa-7480-4761-8cd2-1371d6cb1410] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.064169] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e602cc45-9637-4728-a8d8-a271de573a73 tempest-ServersAaction247Test-1501939504 tempest-ServersAaction247Test-1501939504-project-member] Lock "e22852fa-7480-4761-8cd2-1371d6cb1410" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.300s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.077362] env[68798]: DEBUG nova.compute.manager [None req-7d9b0a26-7ccf-4863-9ae1-303f60ee2a72 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ca8f61e2-513d-48a0-aebd-18507eccd99d] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.084946] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ec2934c8-d025-4c22-ba78-8ada6933e28a tempest-TenantUsagesTestJSON-1865366222 tempest-TenantUsagesTestJSON-1865366222-project-member] Lock "1ecf18e5-a4a1-4efb-b54a-964b064b51e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.191s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.107189] env[68798]: DEBUG nova.compute.manager [None req-7d9b0a26-7ccf-4863-9ae1-303f60ee2a72 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ca8f61e2-513d-48a0-aebd-18507eccd99d] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.130286] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d9b0a26-7ccf-4863-9ae1-303f60ee2a72 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ca8f61e2-513d-48a0-aebd-18507eccd99d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.857s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.140761] env[68798]: DEBUG nova.compute.manager [None req-ba34f1bc-0224-4b5f-be2e-26c4e036422a tempest-InstanceActionsTestJSON-1321523912 tempest-InstanceActionsTestJSON-1321523912-project-member] [instance: dd8c777d-d724-4f7c-9516-448c4b2abb5f] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.184460] env[68798]: DEBUG nova.compute.manager [None req-ba34f1bc-0224-4b5f-be2e-26c4e036422a tempest-InstanceActionsTestJSON-1321523912 tempest-InstanceActionsTestJSON-1321523912-project-member] [instance: dd8c777d-d724-4f7c-9516-448c4b2abb5f] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.206777] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ba34f1bc-0224-4b5f-be2e-26c4e036422a tempest-InstanceActionsTestJSON-1321523912 tempest-InstanceActionsTestJSON-1321523912-project-member] Lock "dd8c777d-d724-4f7c-9516-448c4b2abb5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.449s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.217049] env[68798]: DEBUG nova.compute.manager [None req-bdbabf8c-dbfb-43f5-a7f2-3862f5700175 tempest-ServerActionsTestJSON-1499212270 tempest-ServerActionsTestJSON-1499212270-project-member] [instance: c9185203-eefd-455a-ba91-ec9797db792e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.244382] env[68798]: DEBUG nova.compute.manager [None req-bdbabf8c-dbfb-43f5-a7f2-3862f5700175 tempest-ServerActionsTestJSON-1499212270 tempest-ServerActionsTestJSON-1499212270-project-member] [instance: c9185203-eefd-455a-ba91-ec9797db792e] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.270290] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdbabf8c-dbfb-43f5-a7f2-3862f5700175 tempest-ServerActionsTestJSON-1499212270 tempest-ServerActionsTestJSON-1499212270-project-member] Lock "c9185203-eefd-455a-ba91-ec9797db792e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.475s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.280179] env[68798]: DEBUG nova.compute.manager [None req-ca275a74-07ed-4f4e-971e-100dba602961 tempest-ServersV294TestFqdnHostnames-1834906833 tempest-ServersV294TestFqdnHostnames-1834906833-project-member] [instance: d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.306346] env[68798]: DEBUG nova.compute.manager [None req-ca275a74-07ed-4f4e-971e-100dba602961 tempest-ServersV294TestFqdnHostnames-1834906833 tempest-ServersV294TestFqdnHostnames-1834906833-project-member] [instance: d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.329514] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca275a74-07ed-4f4e-971e-100dba602961 tempest-ServersV294TestFqdnHostnames-1834906833 tempest-ServersV294TestFqdnHostnames-1834906833-project-member] Lock "d5f5af2a-c638-4abf-87f7-3fcb6ee0bcdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.382s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.340470] env[68798]: DEBUG nova.compute.manager [None req-fee39f75-cceb-4822-942a-5dae54a69cca tempest-ServerDiagnosticsTest-1659991863 tempest-ServerDiagnosticsTest-1659991863-project-member] [instance: 28ba427d-0034-41e3-b474-eab0eb3c794e] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.377784] env[68798]: DEBUG nova.compute.manager [None req-fee39f75-cceb-4822-942a-5dae54a69cca tempest-ServerDiagnosticsTest-1659991863 tempest-ServerDiagnosticsTest-1659991863-project-member] [instance: 28ba427d-0034-41e3-b474-eab0eb3c794e] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.401388] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fee39f75-cceb-4822-942a-5dae54a69cca tempest-ServerDiagnosticsTest-1659991863 tempest-ServerDiagnosticsTest-1659991863-project-member] Lock "28ba427d-0034-41e3-b474-eab0eb3c794e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.138s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.411460] env[68798]: DEBUG nova.compute.manager [None req-2309fdb9-7be8-4a10-af8d-0a2af6ec3186 tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] [instance: 63902532-8a21-4dbe-8315-ef6c45f88859] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.435519] env[68798]: DEBUG nova.compute.manager [None req-2309fdb9-7be8-4a10-af8d-0a2af6ec3186 tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] [instance: 63902532-8a21-4dbe-8315-ef6c45f88859] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.456889] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2309fdb9-7be8-4a10-af8d-0a2af6ec3186 tempest-SecurityGroupsTestJSON-1301535814 tempest-SecurityGroupsTestJSON-1301535814-project-member] Lock "63902532-8a21-4dbe-8315-ef6c45f88859" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.720s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.465885] env[68798]: DEBUG nova.compute.manager [None req-512673bb-5670-4d46-ab51-04f1d6fa630f tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.489802] env[68798]: DEBUG nova.compute.manager [None req-512673bb-5670-4d46-ab51-04f1d6fa630f tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1089.510055] env[68798]: DEBUG oslo_concurrency.lockutils [None req-512673bb-5670-4d46-ab51-04f1d6fa630f tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "d5d1053f-a0c0-4b93-aef4-60b6ca0dc1ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.955s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1089.519225] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1089.580280] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.580591] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.582306] env[68798]: INFO nova.compute.claims [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1089.942421] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c37a815-9668-44f4-88fe-dcad2924c979 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.951629] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9984e0a7-b90f-4116-9099-a46c38a73ee3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.982026] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6ad55f-9ba7-4664-91c3-47975d3cdc00 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.989742] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63ed0d1-a6e1-4b31-8319-e7690d2bc492 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.003542] env[68798]: DEBUG nova.compute.provider_tree [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.012765] env[68798]: DEBUG nova.scheduler.client.report [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.025700] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.445s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.029147] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1090.058468] env[68798]: DEBUG nova.compute.utils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1090.060486] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1090.061238] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1090.070022] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1090.117627] env[68798]: DEBUG nova.policy [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29675afe209e492bab415ced0980c8c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa02dd31bcb54b1dbdb30faa9f85b49e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1090.140081] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1090.164917] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1090.165192] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1090.165352] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1090.165533] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1090.165710] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1090.165882] env[68798]: DEBUG nova.virt.hardware [None 
req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1090.166111] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1090.166273] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1090.166441] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1090.166607] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1090.166919] env[68798]: DEBUG nova.virt.hardware [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1090.167654] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f7fb0f-fdae-476c-8472-ec35625d2aa2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.176127] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cf6b08-eadc-47d6-8489-04f79dbb2f26 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.591051] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Successfully created port: 717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1091.467811] env[68798]: DEBUG nova.compute.manager [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Received event network-vif-plugged-717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1091.468169] env[68798]: DEBUG oslo_concurrency.lockutils [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] Acquiring lock 
"1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.468342] env[68798]: DEBUG oslo_concurrency.lockutils [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.468508] env[68798]: DEBUG oslo_concurrency.lockutils [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.468729] env[68798]: DEBUG nova.compute.manager [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] No waiting events found dispatching network-vif-plugged-717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1091.468925] env[68798]: WARNING nova.compute.manager [req-94e308d2-f2c0-4e35-877c-953de2b708e9 req-4123c053-4981-49cd-82e6-6950d36e89b0 service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Received unexpected event network-vif-plugged-717a24d1-6759-42c3-a508-265daaeb22d5 for instance with vm_state building and task_state spawning. [ 1091.581292] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Successfully updated port: 717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1091.593393] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.593555] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquired lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.593713] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1091.656970] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance cache missing 
network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1091.854274] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Updating instance_info_cache with network_info: [{"id": "717a24d1-6759-42c3-a508-265daaeb22d5", "address": "fa:16:3e:b8:01:78", "network": {"id": "a0ec6568-7509-46b2-a9ac-c61ab2656b5c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1634401284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa02dd31bcb54b1dbdb30faa9f85b49e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap717a24d1-67", "ovs_interfaceid": "717a24d1-6759-42c3-a508-265daaeb22d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.866935] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Releasing lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.867320] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance network_info: |[{"id": "717a24d1-6759-42c3-a508-265daaeb22d5", "address": "fa:16:3e:b8:01:78", "network": {"id": "a0ec6568-7509-46b2-a9ac-c61ab2656b5c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1634401284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa02dd31bcb54b1dbdb30faa9f85b49e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap717a24d1-67", "ovs_interfaceid": "717a24d1-6759-42c3-a508-265daaeb22d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1091.867804] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:01:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4b6ddb2-2e19-4031-9b22-add90d41a114', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '717a24d1-6759-42c3-a508-265daaeb22d5', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1091.875587] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Creating folder: Project (fa02dd31bcb54b1dbdb30faa9f85b49e). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1091.876204] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-808050da-475d-471e-85f6-8ce22a5fda83 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.888033] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Created folder: Project (fa02dd31bcb54b1dbdb30faa9f85b49e) in parent group-v834492. [ 1091.888289] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Creating folder: Instances. Parent ref: group-v834550. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1091.888630] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c21a9dc-9277-4190-a2f5-a23d90831e57 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.899398] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Created folder: Instances in parent group-v834550. [ 1091.899717] env[68798]: DEBUG oslo.service.loopingcall [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1091.899952] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1091.900218] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2741d0a7-e2cf-4e58-9357-5bf2f59dafd8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.921077] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1091.921077] env[68798]: value = "task-4217603" [ 1091.921077] env[68798]: _type = "Task" [ 1091.921077] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.927816] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217603, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.430213] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217603, 'name': CreateVM_Task, 'duration_secs': 0.310018} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.430408] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1092.431114] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.431285] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.431615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1092.431885] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f05a916-a507-4e88-871e-7eb1616f0f7a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.437284] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for the task: (returnval){ [ 1092.437284] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]522e274d-b762-13ad-991f-e0cd65a9817a" [ 1092.437284] env[68798]: _type = "Task" [ 1092.437284] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.444882] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]522e274d-b762-13ad-991f-e0cd65a9817a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.948288] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.948814] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.948814] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.494497] env[68798]: DEBUG nova.compute.manager [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Received event network-changed-717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1093.494697] env[68798]: DEBUG nova.compute.manager [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Refreshing instance network info cache due to event network-changed-717a24d1-6759-42c3-a508-265daaeb22d5. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1093.494908] env[68798]: DEBUG oslo_concurrency.lockutils [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] Acquiring lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1093.495063] env[68798]: DEBUG oslo_concurrency.lockutils [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] Acquired lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.495228] env[68798]: DEBUG nova.network.neutron [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Refreshing network info cache for port 717a24d1-6759-42c3-a508-265daaeb22d5 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1093.778428] env[68798]: DEBUG nova.network.neutron [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Updated VIF entry in instance network info cache for port 717a24d1-6759-42c3-a508-265daaeb22d5. 
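
Note: the network_info blob recorded in the cache-update entries above is plain JSON-serializable data. The sketch below is illustrative only — it abridges the literal cache entry logged above and uses an ad hoc helper name (summarize_vif, not Nova code) to show which fields the driver later turns into VIF info (MAC, fixed IP, MTU, NSX logical-switch id).

    # Illustrative only: summarize one cached VIF entry as logged above.
    network_info = [{
        "id": "717a24d1-6759-42c3-a508-265daaeb22d5",
        "address": "fa:16:3e:b8:01:78",
        "network": {
            "id": "a0ec6568-7509-46b2-a9ac-c61ab2656b5c",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.12", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
        "details": {"nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114",
                    "segmentation_id": 921},
        "devname": "tap717a24d1-67",
    }]

    def summarize_vif(vif):
        # Collect every fixed IP across all subnets of the VIF's network.
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": fixed_ips,
            "mtu": vif["network"]["meta"]["mtu"],
            "nsx_switch": vif["details"]["nsx-logical-switch-id"],
        }

    for vif in network_info:
        print(summarize_vif(vif))
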
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1093.778831] env[68798]: DEBUG nova.network.neutron [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Updating instance_info_cache with network_info: [{"id": "717a24d1-6759-42c3-a508-265daaeb22d5", "address": "fa:16:3e:b8:01:78", "network": {"id": "a0ec6568-7509-46b2-a9ac-c61ab2656b5c", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1634401284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fa02dd31bcb54b1dbdb30faa9f85b49e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4b6ddb2-2e19-4031-9b22-add90d41a114", "external-id": "nsx-vlan-transportzone-921", "segmentation_id": 921, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap717a24d1-67", "ovs_interfaceid": "717a24d1-6759-42c3-a508-265daaeb22d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.789249] env[68798]: DEBUG oslo_concurrency.lockutils [req-578e36c0-f64c-49a1-ab89-11bf849e3941 req-c2dc6971-ee39-40d7-ab07-62e4705958db service nova] Releasing lock "refresh_cache-1ae2e411-d8e4-4abb-8c7b-b907ebba094c" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.741958] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.115336] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.048492] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.050586] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1101.050586] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None 
req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1101.078850] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
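
Note: the _heal_instance_info_cache entries above show the periodic task walking every instance on the host and skipping the ones still in the Building state. A minimal sketch of that filtering step (simplified, with an ad hoc Instance type; not the actual ComputeManager code):

    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        vm_state: str   # e.g. "building", "active"

    def instances_to_heal(instances):
        """Yield instances whose network info cache is worth refreshing."""
        for inst in instances:
            if inst.vm_state == "building":
                # Mirrors the "Skipping network cache update ... Building" lines above.
                continue
            yield inst

    instances = [Instance("1ae2e411-d8e4-4abb-8c7b-b907ebba094c", "building")]
    if not list(instances_to_heal(instances)):
        print("Didn't find any instances for network info cache update.")
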
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1101.078850] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.078850] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.049290] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.049290] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.048189] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.213700] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "da5d9023-f6c1-44f8-9465-36aa2b109924" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1104.216037] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.049042] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.049543] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
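
Note: the _reclaim_queued_deletes task above returns early because CONF.reclaim_instance_interval is not positive. A hedged sketch of that guard using oslo.config directly (the option name is taken from the log; registering it locally and the default value are illustrative, Nova defines the option in its own config modules):

    from oslo_config import cfg

    CONF = cfg.CONF
    # Registered here only so the sketch is self-contained.
    CONF.register_opts([cfg.IntOpt("reclaim_instance_interval", default=0)])

    def reclaim_queued_deletes():
        if CONF.reclaim_instance_interval <= 0:
            print("CONF.reclaim_instance_interval <= 0, skipping...")
            return
        # Otherwise soft-deleted instances older than the interval would be reclaimed.

    reclaim_queued_deletes()
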
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1107.048470] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1107.063931] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1107.064848] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.064848] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.065141] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1107.066611] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ac4368-35cc-422b-aec9-0ae2e52b2570 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.077554] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f882bcac-b5f8-430f-83dd-8c16046e1289 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.093137] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9983e62f-8a74-4963-b7fe-169428d9f207 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.100527] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0fddd1-ff1e-4746-b479-6e87fdd4abe5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.130615] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180758MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1107.130786] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1107.131021] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1107.223539] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.223837] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.223837] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.223977] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224085] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224210] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224325] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224439] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224551] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.224663] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1107.236258] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.247364] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.264342] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 02e3ae68-7367-45db-9a2f-01a2e9f703ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.279124] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3a185352-1cdb-4aa1-b163-abc6e712690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.290782] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be335186-1418-480e-a213-dbe877aa1488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.303504] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.315723] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6669c663-e5ca-4257-b7aa-f694b12f91d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.330804] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance addf38a1-9a3f-4e4f-ae0a-011fa96b344a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.344097] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be9d913d-aeb6-4ae9-baca-d1733e9e5734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.355381] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f21bf87-c969-40dc-bbdc-9b9c0302b3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.371502] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6d29d382-0b65-4cce-a487-1e4096cb4907 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.386445] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5e8081-1a86-4c91-8139-469b7825fc47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.399507] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 66a828dc-4a00-49d8-944a-0c8a90d56219 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.411569] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4376aa4-f675-42e8-a908-a398ab8db455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.426130] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.445105] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
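
Note: in this audit the resource tracker keeps the placement allocations of the ten actively managed instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, and skips healing for the instances that are scheduled but not yet started. The arithmetic below is only an inference from the logged numbers, but it lines up with the final resource view reported just below (used_ram=1792MB, used_disk=10GB, used_vcpus=10) once the 512 MB memory reservation from the inventory data is included:

    # Illustrative arithmetic: reproduce the "Final resource view" totals
    # from the per-instance allocations logged in this audit.
    allocation = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}
    actively_managed = 10          # instances listed as "actively managed" above
    reserved_ram_mb = 512          # MEMORY_MB 'reserved' value from the inventory data

    used_ram_mb = reserved_ram_mb + actively_managed * allocation["MEMORY_MB"]
    used_disk_gb = actively_managed * allocation["DISK_GB"]
    used_vcpus = actively_managed * allocation["VCPU"]

    print(used_ram_mb, used_disk_gb, used_vcpus)   # 1792 10 10
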
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1107.445380] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1107.445526] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1107.818194] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ce0412-3ceb-499d-a982-cf52e4877129 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.826538] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfabe2bf-e0fc-467f-858f-03fd49541565 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.859307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b1a481-7975-4f9e-9f75-c10e07f7023d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.868892] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a631125c-1a55-4d3a-ae57-777476446cfc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.882716] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.895097] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1107.917502] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1107.921019] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.787s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1108.680955] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-cb289113-67bc-4f65-ba5c-caff52187dd5 tempest-AttachInterfacesUnderV243Test-1049645439 tempest-AttachInterfacesUnderV243Test-1049645439-project-member] Acquiring lock "a2cee0f3-08b3-4a25-9d5e-9760604ff948" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1108.681313] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cb289113-67bc-4f65-ba5c-caff52187dd5 tempest-AttachInterfacesUnderV243Test-1049645439 tempest-AttachInterfacesUnderV243Test-1049645439-project-member] Lock "a2cee0f3-08b3-4a25-9d5e-9760604ff948" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1135.511795] env[68798]: WARNING oslo_vmware.rw_handles [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1135.511795] env[68798]: ERROR oslo_vmware.rw_handles [ 1135.512460] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1135.514284] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1135.514568] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Copying Virtual Disk 
[datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/2bf36039-3bd4-44b8-a6a1-9fce43ee94cf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1135.514840] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e433caa5-be31-42a7-9199-007aecfe4a62 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.524264] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for the task: (returnval){ [ 1135.524264] env[68798]: value = "task-4217604" [ 1135.524264] env[68798]: _type = "Task" [ 1135.524264] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.532791] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Task: {'id': task-4217604, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.035451] env[68798]: DEBUG oslo_vmware.exceptions [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1136.035744] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.036449] env[68798]: ERROR nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.036449] env[68798]: Faults: ['InvalidArgument'] [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Traceback (most recent call last): [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] yield resources [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self.driver.spawn(context, instance, image_meta, [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 
3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self._fetch_image_if_missing(context, vi) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] image_cache(vi, tmp_image_ds_loc) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] vm_util.copy_virtual_disk( [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] session._wait_for_task(vmdk_copy_task) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return self.wait_for_task(task_ref) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return evt.wait() [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] result = hub.switch() [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return self.greenlet.switch() [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self.f(*self.args, **self.kw) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] raise 
exceptions.translate_fault(task_info.error) [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Faults: ['InvalidArgument'] [ 1136.036449] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] [ 1136.037440] env[68798]: INFO nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Terminating instance [ 1136.038460] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.039633] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.040314] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Start destroying the instance on the hypervisor. 
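
Note: the traceback above shows the general oslo.vmware control flow — a task reference is polled until it reports success or error, and an error is translated into a VimFaultException (here InvalidArgument on fileType during CopyVirtualDisk_Task), which aborts the spawn and triggers the teardown that follows. The poller below is a much-simplified illustration of that flow with ad hoc types and return shapes; it is not the oslo.vmware implementation:

    import time

    class TaskFailed(Exception):
        """Stand-in for the translated VIM fault raised by the poller."""

    def wait_for_task(poll, interval=0.5):
        """Poll `poll()` until it reports a terminal state.

        `poll` is assumed to return a dict like
        {'state': 'running'|'success'|'error', 'error': str} (ad hoc shape).
        """
        while True:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # oslo.vmware translates the VIM fault here; we just raise.
                raise TaskFailed(info["error"])
            time.sleep(interval)

    # Example: a task that fails the way CopyVirtualDisk_Task did above.
    states = iter([
        {"state": "running"},
        {"state": "error", "error": "A specified parameter was not correct: fileType"},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except TaskFailed as exc:
        print("task failed:", exc)
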
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1136.040510] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1136.040754] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5209441-e3ab-4d7f-ac29-1a4a62dab5fa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.043338] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb2f8c5-eaf2-467f-8f9e-a991aff94bf2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.051203] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1136.051528] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cb176c9-8d8d-43e4-8a05-a1d4658c5835 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.054582] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.054793] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1136.055818] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c90203d-f290-47e1-a343-532e715eff06 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.061703] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for the task: (returnval){ [ 1136.061703] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5296f36b-3e21-835c-49c5-8b82e2bec7b9" [ 1136.061703] env[68798]: _type = "Task" [ 1136.061703] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.072057] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5296f36b-3e21-835c-49c5-8b82e2bec7b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.492116] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1136.492412] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1136.492643] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Deleting the datastore file [datastore1] 3bf7d713-8315-48d9-85dd-4ff09c9c7782 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.492975] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15995376-fc52-4244-8032-3460a5616ffd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.499660] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for the task: (returnval){ [ 1136.499660] env[68798]: value = "task-4217606" [ 1136.499660] env[68798]: _type = "Task" [ 1136.499660] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.508360] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Task: {'id': task-4217606, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.572714] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1136.573248] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Creating directory with path [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.573374] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7515ae71-ba1d-47f4-a6ab-635e47896dfa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.595924] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Created directory with path [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.595924] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Fetch image to [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1136.595924] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1136.596789] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c79fb6-c034-450c-ab88-c4fb2cc77ec0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.604445] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03fdced-728e-4f94-90b4-52faae31d1f1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.613801] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281080d4-cb38-4a05-bc43-fedbed5c0492 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.645965] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b811719f-198b-4c3a-bf5a-7eb641fc34f0 {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.652538] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-518236e7-8201-4bea-b6c7-4ee1e21984c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.682791] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1136.738984] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1136.798188] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1136.798348] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1137.010159] env[68798]: DEBUG oslo_vmware.api [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Task: {'id': task-4217606, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091925} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.010416] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.010604] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1137.010777] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1137.010955] env[68798]: INFO nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Took 0.97 seconds to destroy the instance on the hypervisor. [ 1137.013331] env[68798]: DEBUG nova.compute.claims [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1137.013509] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1137.013725] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1137.406734] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002e236f-859c-4bd8-907a-cac72e9b8c0a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.416055] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef38555-aa8a-471d-af17-144713c1e294 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.447148] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d11cf7-3d46-4b60-ae7a-d009bdbe14f8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.455554] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb118ae0-c86d-4dc3-8fd8-4bc83e678855 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.470810] env[68798]: DEBUG nova.compute.provider_tree [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.481977] env[68798]: DEBUG nova.scheduler.client.report [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1137.499336] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.485s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1137.499936] env[68798]: ERROR nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.499936] env[68798]: Faults: ['InvalidArgument'] [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Traceback (most recent call last): [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self.driver.spawn(context, instance, image_meta, [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self._fetch_image_if_missing(context, vi) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] image_cache(vi, tmp_image_ds_loc) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] vm_util.copy_virtual_disk( [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] session._wait_for_task(vmdk_copy_task) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return self.wait_for_task(task_ref) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return evt.wait() [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] result = hub.switch() [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] return self.greenlet.switch() [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] self.f(*self.args, **self.kw) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] raise exceptions.translate_fault(task_info.error) [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Faults: ['InvalidArgument'] [ 1137.499936] env[68798]: ERROR nova.compute.manager [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] [ 1137.500764] env[68798]: DEBUG nova.compute.utils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] VimFaultException {{(pid=68798) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1137.502215] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Build of instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 was re-scheduled: A specified parameter was not correct: fileType [ 1137.502215] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1137.502586] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1137.502758] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1137.502941] env[68798]: DEBUG nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1137.503118] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1137.933164] env[68798]: DEBUG nova.network.neutron [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.951809] env[68798]: INFO nova.compute.manager [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Took 0.45 seconds to deallocate network for instance. 
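The traceback above bottoms out in oslo_vmware's _poll_task re-raising the task fault via exceptions.translate_fault(task_info.error), which is why the copy_virtual_disk step fails with VimFaultException ('A specified parameter was not correct: fileType') and the build is re-scheduled. As a rough, self-contained sketch of that poll-and-raise pattern only -- not the oslo.vmware implementation; TaskInfo, fetch_task_info and VimFaultError are hypothetical stand-ins -- the loop looks roughly like this:

    # Illustrative sketch of the wait_for_task / _poll_task pattern seen in the
    # traceback above. All names here are hypothetical, not oslo.vmware APIs.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        state: str                   # 'running', 'success' or 'error'
        progress: int = 0            # percent complete
        error: Optional[str] = None  # fault message when state == 'error'

    class VimFaultError(Exception):
        """Raised when the backend reports the task failed (hypothetical)."""

    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5) -> TaskInfo:
        """Poll a task until it finishes, raising on failure."""
        while True:
            info = fetch_task_info()
            if info.state == 'running':
                print(f"Task progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                print("Task completed successfully.")
                return info
            # state == 'error': surface the fault, analogous to _poll_task
            # raising exceptions.translate_fault(task_info.error) above.
            raise VimFaultError(info.error or "task failed")

In the log this pattern appears as the repeated "progress is 0%" polls followed either by "completed successfully" (e.g. task-4217606) or, as here, by the InvalidArgument fault that aborts the claim and triggers the reschedule.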
[ 1138.084274] env[68798]: INFO nova.scheduler.client.report [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Deleted allocations for instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 [ 1138.108046] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c1826d9b-b11b-41d8-8bdd-455d643a99df tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 499.340s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.109431] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 301.492s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.110126] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Acquiring lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.110126] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.110126] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.112218] env[68798]: INFO nova.compute.manager [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Terminating instance [ 1138.114141] env[68798]: DEBUG nova.compute.manager [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1138.114360] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1138.114974] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d91560e7-ce43-401e-9300-a5d99caaf134 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.127459] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc5b11d-5b38-44dd-a04b-df0fbae08f07 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.146073] env[68798]: DEBUG nova.compute.manager [None req-bdc8455a-855c-4119-bb4a-a15628227e40 tempest-FloatingIPsAssociationTestJSON-1950445159 tempest-FloatingIPsAssociationTestJSON-1950445159-project-member] [instance: fd8e2ed6-f349-48a5-88db-54dd535587cf] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1138.173302] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3bf7d713-8315-48d9-85dd-4ff09c9c7782 could not be found. [ 1138.174045] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1138.174045] env[68798]: INFO nova.compute.manager [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1138.174045] env[68798]: DEBUG oslo.service.loopingcall [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1138.174247] env[68798]: DEBUG nova.compute.manager [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1138.174319] env[68798]: DEBUG nova.network.neutron [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1138.185790] env[68798]: DEBUG nova.compute.manager [None req-bdc8455a-855c-4119-bb4a-a15628227e40 tempest-FloatingIPsAssociationTestJSON-1950445159 tempest-FloatingIPsAssociationTestJSON-1950445159-project-member] [instance: fd8e2ed6-f349-48a5-88db-54dd535587cf] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1138.205892] env[68798]: DEBUG nova.network.neutron [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.217690] env[68798]: INFO nova.compute.manager [-] [instance: 3bf7d713-8315-48d9-85dd-4ff09c9c7782] Took 0.04 seconds to deallocate network for instance. [ 1138.228213] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bdc8455a-855c-4119-bb4a-a15628227e40 tempest-FloatingIPsAssociationTestJSON-1950445159 tempest-FloatingIPsAssociationTestJSON-1950445159-project-member] Lock "fd8e2ed6-f349-48a5-88db-54dd535587cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.104s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.240056] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1138.321014] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1138.321825] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1138.323086] env[68798]: INFO nova.compute.claims [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1138.338061] env[68798]: DEBUG oslo_concurrency.lockutils [None req-69c0c6d2-66cb-4f02-a51d-9374d1edf910 tempest-ImagesOneServerTestJSON-1858711136 tempest-ImagesOneServerTestJSON-1858711136-project-member] Lock "3bf7d713-8315-48d9-85dd-4ff09c9c7782" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.780300] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5121705-a305-463f-8859-ee2996146c67 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.788409] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d94574-96c4-4dee-a049-6abcfdce1808 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.821197] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed632ef-3deb-44b8-a334-45c41a7237f9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.829419] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388d2e80-22b6-4731-a885-dc94ca233ec9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.843563] env[68798]: DEBUG nova.compute.provider_tree [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.852628] env[68798]: DEBUG nova.scheduler.client.report [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.867147] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.546s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.867686] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1138.904766] env[68798]: DEBUG nova.compute.utils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1138.906027] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1138.906212] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1138.921053] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1138.975506] env[68798]: DEBUG nova.policy [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc6caa6d7e4a49cf83311781e678d723', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5a4aaad5cff4665905ecbfb2adf895c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1138.987838] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1139.014451] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1139.014620] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1139.014784] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.014969] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1139.015149] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.015304] env[68798]: DEBUG nova.virt.hardware [None 
req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1139.015513] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1139.015671] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1139.015836] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1139.016013] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1139.016178] env[68798]: DEBUG nova.virt.hardware [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1139.017259] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83f86cd-c056-4bf2-857c-02771058d2a8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.026256] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c17af9-5a82-4519-a599-45630db03184 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.375186] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Successfully created port: ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1140.097836] env[68798]: DEBUG nova.compute.manager [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Received event network-vif-plugged-ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1140.098233] env[68798]: DEBUG oslo_concurrency.lockutils [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] Acquiring lock 
"5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1140.098321] env[68798]: DEBUG oslo_concurrency.lockutils [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.098458] env[68798]: DEBUG oslo_concurrency.lockutils [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1140.098626] env[68798]: DEBUG nova.compute.manager [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] No waiting events found dispatching network-vif-plugged-ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1140.098877] env[68798]: WARNING nova.compute.manager [req-83f56de8-d566-4136-b46e-20416bc0dd39 req-a96f0b20-7f52-4524-9370-423be0211c33 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Received unexpected event network-vif-plugged-ce8fefe5-5e67-46c6-98fa-aea94740a76b for instance with vm_state building and task_state spawning. [ 1140.184388] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Successfully updated port: ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.195615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.195790] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.195919] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1140.239947] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1140.470295] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Updating instance_info_cache with network_info: [{"id": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "address": "fa:16:3e:ec:b2:38", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8fefe5-5e", "ovs_interfaceid": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.482081] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1140.482386] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance network_info: |[{"id": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "address": "fa:16:3e:ec:b2:38", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8fefe5-5e", "ovs_interfaceid": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1140.482815] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b2:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce8fefe5-5e67-46c6-98fa-aea94740a76b', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.490551] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating folder: Project (b5a4aaad5cff4665905ecbfb2adf895c). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1140.491122] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88064df6-1889-46ce-a690-86f80a4534f1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.503063] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created folder: Project (b5a4aaad5cff4665905ecbfb2adf895c) in parent group-v834492. [ 1140.503294] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating folder: Instances. Parent ref: group-v834553. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1140.503518] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69aebd97-41fe-4d9e-8f46-faff42015cac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.514157] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created folder: Instances in parent group-v834553. [ 1140.514446] env[68798]: DEBUG oslo.service.loopingcall [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1140.514656] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1140.514872] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77176b0c-0e8b-45fe-a9f0-7e62b0b0d670 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.534724] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.534724] env[68798]: value = "task-4217609" [ 1140.534724] env[68798]: _type = "Task" [ 1140.534724] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.543261] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217609, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.045499] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217609, 'name': CreateVM_Task, 'duration_secs': 0.311739} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.045691] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1141.046468] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1141.046578] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.046906] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1141.047185] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4780da2-c94e-4af6-9a5b-bb00a5d42cbc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.052840] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1141.052840] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52298bf3-f8db-a834-a7a4-512e702fa9bc" [ 1141.052840] env[68798]: _type = "Task" [ 1141.052840] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.061725] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52298bf3-f8db-a834-a7a4-512e702fa9bc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.564023] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1141.564425] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.564512] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.204330] env[68798]: DEBUG nova.compute.manager [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Received event network-changed-ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1142.204598] env[68798]: DEBUG nova.compute.manager [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Refreshing instance network info cache due to event network-changed-ce8fefe5-5e67-46c6-98fa-aea94740a76b. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1142.204814] env[68798]: DEBUG oslo_concurrency.lockutils [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] Acquiring lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.205025] env[68798]: DEBUG oslo_concurrency.lockutils [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] Acquired lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.205437] env[68798]: DEBUG nova.network.neutron [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Refreshing network info cache for port ce8fefe5-5e67-46c6-98fa-aea94740a76b {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1142.491978] env[68798]: DEBUG nova.network.neutron [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Updated VIF entry in instance network info cache for port ce8fefe5-5e67-46c6-98fa-aea94740a76b. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1142.492408] env[68798]: DEBUG nova.network.neutron [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Updating instance_info_cache with network_info: [{"id": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "address": "fa:16:3e:ec:b2:38", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce8fefe5-5e", "ovs_interfaceid": "ce8fefe5-5e67-46c6-98fa-aea94740a76b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.503365] env[68798]: DEBUG oslo_concurrency.lockutils [req-393fa91c-ba47-4b1c-b3e4-d2b4c01c3b99 req-c725e1dd-d245-4972-9dbb-fe77d33e2918 service nova] Releasing lock "refresh_cache-5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.312918] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.313214] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.002325] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.049195] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.049446] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 1158.061669] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1161.050077] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.050077] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.050461] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1161.050461] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 1162.059386] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.059710] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1162.059710] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1162.081287] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.081484] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.081619] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.081746] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.081876] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.081981] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.082117] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.082235] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.082351] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.082466] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1162.082608] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1162.083189] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.048879] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.049210] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.056258] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.056530] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1165.044598] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.048968] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.049382] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1168.048119] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.061154] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.061403] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.061538] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.061695] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1168.062936] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677b7811-9035-4762-99b4-0ad659b4e888 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.073168] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3211b6dd-0b82-47b2-ac71-1ccf3ef65aac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.088519] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3222b5a9-47ef-4a2a-a4e0-de94a2a707a8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.096697] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9592c6-06ee-4e0c-a575-f89a682aec00 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.127194] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180767MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1168.127382] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1168.127553] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.275572] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.275743] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.275875] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276008] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276144] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276264] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276379] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276496] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276630] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.276735] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.290575] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.302241] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 02e3ae68-7367-45db-9a2f-01a2e9f703ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.313260] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3a185352-1cdb-4aa1-b163-abc6e712690e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.324156] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be335186-1418-480e-a213-dbe877aa1488 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.335257] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.346674] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6669c663-e5ca-4257-b7aa-f694b12f91d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.357778] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance addf38a1-9a3f-4e4f-ae0a-011fa96b344a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.369995] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be9d913d-aeb6-4ae9-baca-d1733e9e5734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.382789] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f21bf87-c969-40dc-bbdc-9b9c0302b3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.394303] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6d29d382-0b65-4cce-a487-1e4096cb4907 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.405537] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5e8081-1a86-4c91-8139-469b7825fc47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.419311] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 66a828dc-4a00-49d8-944a-0c8a90d56219 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.432497] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4376aa4-f675-42e8-a908-a398ab8db455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.443864] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.456360] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.466675] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a2cee0f3-08b3-4a25-9d5e-9760604ff948 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.479711] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.480194] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1168.480194] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1168.499974] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1168.515575] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1168.515771] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1168.529191] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1168.548377] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1168.894152] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2224ee3-8242-442c-adb1-3f2429fe9c04 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.902633] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f39d487-cc78-4b87-be36-8b36e18b2735 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.933605] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74432ef1-ad31-4da9-be97-b367c4c9ae2e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.942255] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554f396c-bb85-4ffe-bcf7-18fc2b6e7d46 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.957330] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.966951] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1168.985325] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1168.985667] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.858s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.446686] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.479363] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 1175.479363] env[68798]: value = "domain-c8" [ 1175.479363] env[68798]: _type = "ClusterComputeResource" [ 1175.479363] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1175.481529] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16da26e0-c50a-4c7e-81d9-9f497b06cda6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.504969] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 10 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1175.505258] env[68798]: DEBUG nova.compute.manager 
[None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid cdb141da-a05c-4891-a33d-6e12eafe4f22 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.505560] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 551707b9-118e-45c8-a28f-e70486272f6e {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.505796] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 89f660c8-6efd-4789-90ee-67e42abc1db7 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.506045] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 9923a3c7-f090-4a01-8c57-36c8c22c6b14 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.506223] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.506446] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid e848c3f4-64ff-4956-88e0-afa27be73068 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.506708] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 30e8027d-98b3-4a5f-9eb4-244846cb90e2 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.506965] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid a7060037-2580-464a-b434-90ffe7314bd1 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.507238] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 1ae2e411-d8e4-4abb-8c7b-b907ebba094c {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.507648] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1175.508145] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.508615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "551707b9-118e-45c8-a28f-e70486272f6e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.508964] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "89f660c8-6efd-4789-90ee-67e42abc1db7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.509296] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.510029] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.510029] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "e848c3f4-64ff-4956-88e0-afa27be73068" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.510195] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.510531] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.510837] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1175.511157] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1183.097184] env[68798]: WARNING oslo_vmware.rw_handles [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1183.097184] 
env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1183.097184] env[68798]: ERROR oslo_vmware.rw_handles [ 1183.097839] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1183.099783] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1183.100047] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Copying Virtual Disk [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/9f200068-5876-45a2-81de-897e774fc734/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1183.100366] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e19094bf-e7dc-4083-8d64-471050f1d180 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.109126] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for the task: (returnval){ [ 1183.109126] env[68798]: value = "task-4217610" [ 1183.109126] env[68798]: _type = "Task" [ 1183.109126] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.118273] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Task: {'id': task-4217610, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.620653] env[68798]: DEBUG oslo_vmware.exceptions [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1183.620945] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.621566] env[68798]: ERROR nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1183.621566] env[68798]: Faults: ['InvalidArgument'] [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Traceback (most recent call last): [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] yield resources [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self.driver.spawn(context, instance, image_meta, [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self._fetch_image_if_missing(context, vi) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] image_cache(vi, tmp_image_ds_loc) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] vm_util.copy_virtual_disk( [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] session._wait_for_task(vmdk_copy_task) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return self.wait_for_task(task_ref) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return evt.wait() [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] result = hub.switch() [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return self.greenlet.switch() [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self.f(*self.args, **self.kw) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] raise exceptions.translate_fault(task_info.error) [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Faults: ['InvalidArgument'] [ 1183.621566] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] [ 1183.622529] env[68798]: INFO nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Terminating instance [ 1183.623709] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.623923] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1183.624192] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4270ac6-069e-44e8-bad4-a22faef0d3df {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.626558] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1183.626755] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1183.627647] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf222dac-9bfb-4e2d-ab3b-cd6a5d593208 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.635787] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1183.636086] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-daf9e9c4-bc19-4459-8739-49ba42e7c5b7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.638866] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1183.639094] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1183.640183] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba373df8-a222-4f70-bf53-62dc5797e222 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.645743] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for the task: (returnval){ [ 1183.645743] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]525bee48-7945-2bc0-4794-04d3401ebde9" [ 1183.645743] env[68798]: _type = "Task" [ 1183.645743] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.654736] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]525bee48-7945-2bc0-4794-04d3401ebde9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.727600] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1183.728119] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1183.728403] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Deleting the datastore file [datastore1] cdb141da-a05c-4891-a33d-6e12eafe4f22 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1183.728709] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3e5da0f-118f-40b5-ae65-4af37d970c3a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.736445] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for the task: (returnval){ [ 1183.736445] env[68798]: value = "task-4217612" [ 1183.736445] env[68798]: _type = "Task" [ 1183.736445] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.745249] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Task: {'id': task-4217612, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.156480] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1184.156887] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Creating directory with path [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.157081] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8aff4b5a-eaf9-4ede-8b76-d6ef1917aefb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.170552] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Created directory with path [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.170793] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Fetch image to [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1184.171088] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1184.171938] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d58993-d0c6-4e38-ab9c-fc5fd9d25de1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.181038] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934c83e4-f54e-435c-aac3-407d45e7a39d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.192865] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb5292e-8ae5-4c6b-bc06-803bebd3a950 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.224960] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-032ed447-6476-4ea3-baee-775b91d8901f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.232578] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cb0a0ffd-130d-4979-a166-4e59b902f4aa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.245915] env[68798]: DEBUG oslo_vmware.api [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Task: {'id': task-4217612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073825} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.246178] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1184.246365] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1184.246539] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1184.246714] env[68798]: INFO nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1184.248977] env[68798]: DEBUG nova.compute.claims [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1184.249228] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.249492] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.258467] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1184.320607] env[68798]: DEBUG oslo_vmware.rw_handles [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1184.386060] env[68798]: DEBUG oslo_vmware.rw_handles [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1184.386287] env[68798]: DEBUG oslo_vmware.rw_handles [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1184.705612] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605a8b04-d78a-409f-a709-0605dea6926c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.713512] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc0fbd3-616b-4f31-b686-0d5bc8f047d5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.744892] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6ce4a8-67e4-4615-aa9f-b871e883db48 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.753273] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f6d3bf-5b8e-4698-9233-875cf99c8c27 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.767605] env[68798]: DEBUG nova.compute.provider_tree [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.777644] env[68798]: DEBUG nova.scheduler.client.report [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1184.796353] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.546s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.796455] env[68798]: ERROR nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1184.796455] env[68798]: Faults: ['InvalidArgument'] [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Traceback (most recent call last): [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1184.796455] env[68798]: ERROR 
nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self.driver.spawn(context, instance, image_meta, [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self._fetch_image_if_missing(context, vi) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] image_cache(vi, tmp_image_ds_loc) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] vm_util.copy_virtual_disk( [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] session._wait_for_task(vmdk_copy_task) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return self.wait_for_task(task_ref) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return evt.wait() [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] result = hub.switch() [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] return self.greenlet.switch() [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] self.f(*self.args, **self.kw) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] raise exceptions.translate_fault(task_info.error) [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Faults: ['InvalidArgument'] [ 1184.796455] env[68798]: ERROR nova.compute.manager [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] [ 1184.797285] env[68798]: DEBUG nova.compute.utils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1184.799741] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Build of instance cdb141da-a05c-4891-a33d-6e12eafe4f22 was re-scheduled: A specified parameter was not correct: fileType [ 1184.799741] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1184.800139] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1184.800402] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1184.800601] env[68798]: DEBUG nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1184.800769] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1185.190331] env[68798]: DEBUG nova.network.neutron [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.201626] env[68798]: INFO nova.compute.manager [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Took 0.40 seconds to deallocate network for instance. [ 1185.300745] env[68798]: INFO nova.scheduler.client.report [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Deleted allocations for instance cdb141da-a05c-4891-a33d-6e12eafe4f22 [ 1185.323835] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8616491b-0b12-4296-9afd-9f9742a37c5b tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 545.908s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.325172] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 346.600s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.325773] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Acquiring lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.325773] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.326010] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.328454] env[68798]: INFO nova.compute.manager [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Terminating instance [ 1185.330494] env[68798]: DEBUG nova.compute.manager [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1185.330583] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1185.331198] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f6eee16-9d48-4162-af05-fdc094ae5d71 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.337573] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1185.344720] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7186d3c-9502-4501-bbd9-ffb74e94d0b5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.375691] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cdb141da-a05c-4891-a33d-6e12eafe4f22 could not be found. [ 1185.375832] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1185.376025] env[68798]: INFO nova.compute.manager [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1185.376305] env[68798]: DEBUG oslo.service.loopingcall [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1185.376526] env[68798]: DEBUG nova.compute.manager [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1185.376625] env[68798]: DEBUG nova.network.neutron [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1185.400870] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1185.401143] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.402681] env[68798]: INFO nova.compute.claims [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1185.415207] env[68798]: DEBUG nova.network.neutron [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.426607] env[68798]: INFO nova.compute.manager [-] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] Took 0.05 seconds to deallocate network for instance. [ 1185.526954] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0c6ebe75-0926-4f39-a5ac-6882480e20f6 tempest-ServerRescueTestJSON-1069628985 tempest-ServerRescueTestJSON-1069628985-project-member] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.527895] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.020s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1185.528794] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cdb141da-a05c-4891-a33d-6e12eafe4f22] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1185.528794] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "cdb141da-a05c-4891-a33d-6e12eafe4f22" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.815580] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f608b-2f26-46bf-9a12-50c3c9b39c7a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.823911] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf4315d-183d-4898-9581-02768944e55d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.854601] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef69d31d-6a11-4beb-b892-870389b27ac7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.863320] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a4cf26-08ba-4969-abc8-d6ab733a1274 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.877782] env[68798]: DEBUG nova.compute.provider_tree [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1185.888028] env[68798]: DEBUG nova.scheduler.client.report [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1185.907599] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.506s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.908043] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1185.943062] env[68798]: DEBUG nova.compute.utils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1185.944972] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1185.945202] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1185.954689] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1186.013997] env[68798]: DEBUG nova.policy [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '438a9873b1aa47d19e7fbf5ab9c86d7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '058075e457d841a09fcc2d17898d8b66', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1186.019407] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1186.045998] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1186.046278] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1186.046441] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.046626] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1186.046773] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.047557] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1186.048017] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1186.048317] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1186.048535] 
env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1186.048717] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1186.048894] env[68798]: DEBUG nova.virt.hardware [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1186.049861] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2950c36-fed8-40af-84f7-d2fbce41c817 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.059098] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42005fe9-d8e9-4cca-9268-6cceb421752e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.415561] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Successfully created port: f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1187.184609] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Successfully updated port: f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1187.199240] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.199391] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquired lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.199529] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1187.236628] env[68798]: DEBUG 
nova.compute.manager [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Received event network-vif-plugged-f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1187.236857] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Acquiring lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1187.238480] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1187.238480] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1187.238480] env[68798]: DEBUG nova.compute.manager [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] No waiting events found dispatching network-vif-plugged-f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1187.238480] env[68798]: WARNING nova.compute.manager [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Received unexpected event network-vif-plugged-f07d3bb3-c9db-47c4-9bde-62be2d3247b0 for instance with vm_state building and task_state spawning. [ 1187.238480] env[68798]: DEBUG nova.compute.manager [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Received event network-changed-f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1187.238480] env[68798]: DEBUG nova.compute.manager [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Refreshing instance network info cache due to event network-changed-f07d3bb3-c9db-47c4-9bde-62be2d3247b0. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1187.238480] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Acquiring lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.267923] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1187.557489] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Updating instance_info_cache with network_info: [{"id": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "address": "fa:16:3e:52:3c:cf", "network": {"id": "f9a388b0-45e0-42d4-8047-5f3e84307ee0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-305100413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "058075e457d841a09fcc2d17898d8b66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1470a3f8-be8a-4339-8a6f-9519366f32e4", "external-id": "nsx-vlan-transportzone-375", "segmentation_id": 375, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf07d3bb3-c9", "ovs_interfaceid": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.570686] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Releasing lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.571286] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance network_info: |[{"id": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "address": "fa:16:3e:52:3c:cf", "network": {"id": "f9a388b0-45e0-42d4-8047-5f3e84307ee0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-305100413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "058075e457d841a09fcc2d17898d8b66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1470a3f8-be8a-4339-8a6f-9519366f32e4", "external-id": "nsx-vlan-transportzone-375", "segmentation_id": 375, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf07d3bb3-c9", "ovs_interfaceid": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1187.571815] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Acquired lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.572396] env[68798]: DEBUG nova.network.neutron [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Refreshing network info cache for port f07d3bb3-c9db-47c4-9bde-62be2d3247b0 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1187.573895] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:3c:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1470a3f8-be8a-4339-8a6f-9519366f32e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f07d3bb3-c9db-47c4-9bde-62be2d3247b0', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.583026] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Creating folder: Project (058075e457d841a09fcc2d17898d8b66). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1187.584095] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed28df3d-dce5-4f58-a963-cd5d4d343e78 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.599208] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Created folder: Project (058075e457d841a09fcc2d17898d8b66) in parent group-v834492. [ 1187.599676] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Creating folder: Instances. Parent ref: group-v834556. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1187.601022] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a386387-eccc-4c18-b6c2-2fa2404da756 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.610951] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Created folder: Instances in parent group-v834556. [ 1187.611465] env[68798]: DEBUG oslo.service.loopingcall [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1187.611785] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1187.612243] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78b56b49-b7ff-4e7d-9998-21ceed63c082 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.636050] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.636050] env[68798]: value = "task-4217615" [ 1187.636050] env[68798]: _type = "Task" [ 1187.636050] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.644290] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217615, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.146469] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217615, 'name': CreateVM_Task, 'duration_secs': 0.314317} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.146653] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1188.147419] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1188.147587] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.147903] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1188.148182] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f21bef2-773e-460e-a672-2a08f5c2574d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.154201] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for the task: (returnval){ [ 1188.154201] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b2ba83-765f-2af2-4606-792da31dad81" [ 1188.154201] env[68798]: _type = "Task" [ 1188.154201] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.162248] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b2ba83-765f-2af2-4606-792da31dad81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.167029] env[68798]: DEBUG nova.network.neutron [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Updated VIF entry in instance network info cache for port f07d3bb3-c9db-47c4-9bde-62be2d3247b0. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1188.167387] env[68798]: DEBUG nova.network.neutron [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Updating instance_info_cache with network_info: [{"id": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "address": "fa:16:3e:52:3c:cf", "network": {"id": "f9a388b0-45e0-42d4-8047-5f3e84307ee0", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-305100413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "058075e457d841a09fcc2d17898d8b66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1470a3f8-be8a-4339-8a6f-9519366f32e4", "external-id": "nsx-vlan-transportzone-375", "segmentation_id": 375, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf07d3bb3-c9", "ovs_interfaceid": "f07d3bb3-c9db-47c4-9bde-62be2d3247b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.176897] env[68798]: DEBUG oslo_concurrency.lockutils [req-5fd618ee-ce6b-4695-96f9-252e3ff93c48 req-f4a97c19-ba12-4860-a14c-24bba3260788 service nova] Releasing lock "refresh_cache-5e53196f-984a-4d72-8e00-861ef0751dca" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.665269] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.665722] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.665767] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1193.303512] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "5e53196f-984a-4d72-8e00-861ef0751dca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1222.113117] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.044670] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.048384] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.048569] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1223.048716] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073081] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073398] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073398] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073398] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1223.073513] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1223.074346] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1223.074346] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.048602] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.048928] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.048644] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1226.048823] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1227.275449] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "cbe4e626-f063-4877-985f-b3e36c161c9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.275981] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.049057] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.062713] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.062990] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.063186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.063347] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1229.065223] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc18b31d-6c36-4d78-a8b5-81200bea9962 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.075521] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84c3a03-7be4-4fa9-bce8-663bc5d350a2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.090461] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1cbf60-a744-4c25-bf15-26cb1af07979 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.097544] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323d8a7b-b717-4ff4-ad6f-ac9d4e75f0aa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.129510] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180766MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1229.129676] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1229.129881] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1229.215638] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 551707b9-118e-45c8-a28f-e70486272f6e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.215812] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.215941] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216081] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216206] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216323] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216442] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216559] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216676] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.216791] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1229.229033] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.240283] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6669c663-e5ca-4257-b7aa-f694b12f91d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.251500] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance addf38a1-9a3f-4e4f-ae0a-011fa96b344a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.261660] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance be9d913d-aeb6-4ae9-baca-d1733e9e5734 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.271890] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 8f21bf87-c969-40dc-bbdc-9b9c0302b3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.282128] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6d29d382-0b65-4cce-a487-1e4096cb4907 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.292387] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5e8081-1a86-4c91-8139-469b7825fc47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.302426] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 66a828dc-4a00-49d8-944a-0c8a90d56219 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.313085] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4376aa4-f675-42e8-a908-a398ab8db455 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.323468] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.334384] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.344014] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a2cee0f3-08b3-4a25-9d5e-9760604ff948 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.353614] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.364247] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.364490] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1229.364636] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1229.704645] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d551e2d3-e51c-4020-be8b-6e442f46c7b0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.712608] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d60eba-140b-4853-8fbb-2a0127b0a53e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.742685] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7497c43d-55cc-4143-a3f3-1cd198e2288a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.751873] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e619f06-09ec-4523-a394-adb8e4c36363 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.764827] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.773377] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.788247] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1229.788442] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.659s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1232.710046] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1232.710046] env[68798]: ERROR oslo_vmware.rw_handles [ 1232.710826] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1232.712313] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1232.712585] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Copying Virtual Disk [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/3e9a1401-5abe-4f94-861c-30e9081361a0/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1232.712826] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6da1a565-15a4-4e21-a4c1-b88dde1aeb10 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.720819] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for the task: (returnval){ [ 1232.720819] env[68798]: value = "task-4217616" [ 1232.720819] env[68798]: _type 
= "Task" [ 1232.720819] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.729299] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Task: {'id': task-4217616, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.232272] env[68798]: DEBUG oslo_vmware.exceptions [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1233.232570] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.233196] env[68798]: ERROR nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1233.233196] env[68798]: Faults: ['InvalidArgument'] [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Traceback (most recent call last): [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] yield resources [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self.driver.spawn(context, instance, image_meta, [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self._fetch_image_if_missing(context, vi) [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] image_cache(vi, tmp_image_ds_loc) 
[ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] vm_util.copy_virtual_disk( [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] session._wait_for_task(vmdk_copy_task) [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return self.wait_for_task(task_ref) [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return evt.wait() [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] result = hub.switch() [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return self.greenlet.switch() [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self.f(*self.args, **self.kw) [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] raise exceptions.translate_fault(task_info.error) [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Faults: ['InvalidArgument'] [ 1233.233196] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] [ 1233.234145] env[68798]: INFO nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Terminating instance [ 1233.235202] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 
tempest-ServerExternalEventsTest-1021236582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.235418] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1233.235668] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3cbe836-39d8-4ca8-8ace-b4557b9fb25e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.238499] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1233.238499] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1233.239021] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbfd6a2-7d6d-4a8d-8cb0-b94009671fbb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.249301] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1233.250426] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92882c98-2566-47f4-b06b-57d8f189939a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.252029] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1233.252197] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1233.252870] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b01f514f-839c-4069-8110-5aafc37072d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.258501] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for the task: (returnval){ [ 1233.258501] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]521b6099-a4e2-ec02-4127-8f31d6f5dacc" [ 1233.258501] env[68798]: _type = "Task" [ 1233.258501] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.266752] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]521b6099-a4e2-ec02-4127-8f31d6f5dacc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.334033] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1233.334181] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1233.334371] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Deleting the datastore file [datastore1] 551707b9-118e-45c8-a28f-e70486272f6e {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1233.334665] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1c34157-0d3a-4344-962c-706f49e56c0c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.342754] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for the task: (returnval){ [ 1233.342754] env[68798]: value = "task-4217618" [ 1233.342754] env[68798]: _type = "Task" [ 1233.342754] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.352441] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Task: {'id': task-4217618, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.768721] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1233.770232] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Creating directory with path [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1233.770232] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f6b431f-71eb-4e11-87aa-fe71624f51a5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.782078] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Created directory with path [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1233.782282] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Fetch image to [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1233.782456] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1233.783255] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a985ed9-7296-47ee-a599-884fa9019eef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.790015] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7b3e03b-ebbf-4f8a-8be5-fedf8eda5206 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.800148] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f1e098-71b4-45da-afe0-533eb460e5bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.830817] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ca5e9789-16e2-4fa7-9f56-d20e541779c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.837465] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae09a0b9-f0a8-4536-ab77-a41cd3e80bf1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.852087] env[68798]: DEBUG oslo_vmware.api [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Task: {'id': task-4217618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087256} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.852278] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1233.852465] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1233.852638] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1233.852853] env[68798]: INFO nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1233.855116] env[68798]: DEBUG nova.compute.claims [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1233.855302] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1233.855521] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.861158] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1233.915452] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1233.974793] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1233.974991] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1234.211637] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5da5b04-cc61-4c14-8e20-6a59b7a4455f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.219673] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5867a5-4467-4760-9f01-8df69566f9ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.250523] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cda7f9-feb8-4b5e-a1a6-f7c8f090a109 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.258814] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3077297-fd99-491d-980b-953a33004b61 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.275149] env[68798]: DEBUG nova.compute.provider_tree [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.283620] env[68798]: DEBUG nova.scheduler.client.report [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1234.298941] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.443s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.299662] env[68798]: ERROR nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1234.299662] env[68798]: Faults: ['InvalidArgument'] [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Traceback (most recent call last): [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self.driver.spawn(context, instance, image_meta, [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self._fetch_image_if_missing(context, vi) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] image_cache(vi, tmp_image_ds_loc) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] vm_util.copy_virtual_disk( [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] session._wait_for_task(vmdk_copy_task) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return self.wait_for_task(task_ref) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return evt.wait() [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] result = hub.switch() [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] return self.greenlet.switch() [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] self.f(*self.args, **self.kw) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 
551707b9-118e-45c8-a28f-e70486272f6e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] raise exceptions.translate_fault(task_info.error) [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Faults: ['InvalidArgument'] [ 1234.299662] env[68798]: ERROR nova.compute.manager [instance: 551707b9-118e-45c8-a28f-e70486272f6e] [ 1234.300647] env[68798]: DEBUG nova.compute.utils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1234.301842] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Build of instance 551707b9-118e-45c8-a28f-e70486272f6e was re-scheduled: A specified parameter was not correct: fileType [ 1234.301842] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1234.302228] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1234.302403] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1234.302576] env[68798]: DEBUG nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1234.302742] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1234.750222] env[68798]: DEBUG nova.network.neutron [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.761812] env[68798]: INFO nova.compute.manager [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Took 0.46 seconds to deallocate network for instance. [ 1234.863695] env[68798]: INFO nova.scheduler.client.report [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Deleted allocations for instance 551707b9-118e-45c8-a28f-e70486272f6e [ 1234.888588] env[68798]: DEBUG oslo_concurrency.lockutils [None req-00242446-bc2c-45b2-8a38-3d36489ac3b1 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 595.029s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.889849] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 386.788s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.890099] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Acquiring lock "551707b9-118e-45c8-a28f-e70486272f6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.890314] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.890556] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.892624] env[68798]: INFO nova.compute.manager [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Terminating instance [ 1234.894370] env[68798]: DEBUG nova.compute.manager [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1234.894562] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1234.895039] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-179530f2-f4bb-4839-b7b3-187d068e2f9d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.904150] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c0c2dc-66c8-4e29-bd6a-1f02d64400d4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.915380] env[68798]: DEBUG nova.compute.manager [None req-94a572fa-5613-4057-912e-ecb40322e7dd tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 02e3ae68-7367-45db-9a2f-01a2e9f703ff] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1234.938056] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 551707b9-118e-45c8-a28f-e70486272f6e could not be found. 
[ 1234.938274] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1234.938453] env[68798]: INFO nova.compute.manager [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1234.938705] env[68798]: DEBUG oslo.service.loopingcall [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1234.939465] env[68798]: DEBUG nova.compute.manager [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1234.939573] env[68798]: DEBUG nova.network.neutron [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1234.941562] env[68798]: DEBUG nova.compute.manager [None req-94a572fa-5613-4057-912e-ecb40322e7dd tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 02e3ae68-7367-45db-9a2f-01a2e9f703ff] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1234.963833] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a572fa-5613-4057-912e-ecb40322e7dd tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "02e3ae68-7367-45db-9a2f-01a2e9f703ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.409s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.967616] env[68798]: DEBUG nova.network.neutron [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.974363] env[68798]: DEBUG nova.compute.manager [None req-ca7f9759-8978-49da-8895-4e335d31fb87 tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 3a185352-1cdb-4aa1-b163-abc6e712690e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1234.977339] env[68798]: INFO nova.compute.manager [-] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] Took 0.04 seconds to deallocate network for instance. [ 1234.997171] env[68798]: DEBUG nova.compute.manager [None req-ca7f9759-8978-49da-8895-4e335d31fb87 tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 3a185352-1cdb-4aa1-b163-abc6e712690e] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1235.018627] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ca7f9759-8978-49da-8895-4e335d31fb87 tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "3a185352-1cdb-4aa1-b163-abc6e712690e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.891s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.035940] env[68798]: DEBUG nova.compute.manager [None req-94a16663-71c3-4675-bcdf-4321317db602 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: be335186-1418-480e-a213-dbe877aa1488] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1235.066212] env[68798]: DEBUG nova.compute.manager [None req-94a16663-71c3-4675-bcdf-4321317db602 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: be335186-1418-480e-a213-dbe877aa1488] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1235.090608] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f7c4ea09-1e0f-446b-b52b-34b23d6afb22 tempest-ServerRescueNegativeTestJSON-920146277 tempest-ServerRescueNegativeTestJSON-920146277-project-member] Lock "551707b9-118e-45c8-a28f-e70486272f6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.201s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.091526] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "551707b9-118e-45c8-a28f-e70486272f6e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 59.583s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.091670] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 551707b9-118e-45c8-a28f-e70486272f6e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1235.091970] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "551707b9-118e-45c8-a28f-e70486272f6e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.093077] env[68798]: DEBUG oslo_concurrency.lockutils [None req-94a16663-71c3-4675-bcdf-4321317db602 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "be335186-1418-480e-a213-dbe877aa1488" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.801s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.101843] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1235.152614] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1235.152872] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1235.154507] env[68798]: INFO nova.compute.claims [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1235.722294] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9e05f5-a3a5-440d-89b5-7d41ac3abc1b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.731467] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdd6140-f771-4564-8e76-1486e86f38bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.763630] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eea1f80-6c47-490d-85fe-088af85ad264 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.771849] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f319222a-9c21-4efa-9b17-81e365354e1a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.785725] env[68798]: DEBUG nova.compute.provider_tree [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1235.795025] env[68798]: DEBUG nova.scheduler.client.report [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1235.811213] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 
tempest-ServersTestJSON-231587440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.658s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1235.811768] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1235.849296] env[68798]: DEBUG nova.compute.utils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1235.850712] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1235.850884] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1235.863616] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1235.923341] env[68798]: DEBUG nova.policy [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b7487028842413d824c9bccc8a8cdd5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc915fe2f841475892db299b77a09d34', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1235.930181] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1235.958881] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1235.959180] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1235.959599] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1235.959599] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1235.959770] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1235.959917] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1235.960140] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1235.960303] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1235.960493] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 
tempest-ServersTestJSON-231587440-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1235.960670] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1235.960845] env[68798]: DEBUG nova.virt.hardware [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1235.961748] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e390e3-cad4-42f2-8892-a8f5afde593d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.976414] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aec862f-7678-40a6-a9ed-ca513d873d19 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.215935] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1236.442598] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Successfully created port: bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1237.388715] env[68798]: DEBUG nova.compute.manager [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Received event network-vif-plugged-bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1237.388715] env[68798]: DEBUG oslo_concurrency.lockutils [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] Acquiring lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.389103] env[68798]: DEBUG oslo_concurrency.lockutils [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.389351] env[68798]: DEBUG oslo_concurrency.lockutils [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] Lock 
"7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.389619] env[68798]: DEBUG nova.compute.manager [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] No waiting events found dispatching network-vif-plugged-bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1237.389843] env[68798]: WARNING nova.compute.manager [req-88d87daa-db79-4c59-85c4-0b1fc8ed8918 req-bd173a37-e0e9-41b0-b04c-15438d839786 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Received unexpected event network-vif-plugged-bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 for instance with vm_state building and task_state deleting. [ 1237.553309] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Successfully updated port: bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.566052] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.566219] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquired lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.566402] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1237.615773] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1237.839550] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updating instance_info_cache with network_info: [{"id": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "address": "fa:16:3e:fe:b3:81", "network": {"id": "65092b3b-364d-4fac-86a4-e2bbbb781e83", "bridge": "br-int", "label": "tempest-ServersTestJSON-684731779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc915fe2f841475892db299b77a09d34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "21310d90-efbc-45a8-a97f-c4358606530f", "external-id": "nsx-vlan-transportzone-672", "segmentation_id": 672, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbac5b122-e2", "ovs_interfaceid": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.851632] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Releasing lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.852051] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance network_info: |[{"id": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "address": "fa:16:3e:fe:b3:81", "network": {"id": "65092b3b-364d-4fac-86a4-e2bbbb781e83", "bridge": "br-int", "label": "tempest-ServersTestJSON-684731779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc915fe2f841475892db299b77a09d34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "21310d90-efbc-45a8-a97f-c4358606530f", "external-id": "nsx-vlan-transportzone-672", "segmentation_id": 672, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbac5b122-e2", "ovs_interfaceid": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1237.852767] env[68798]: DEBUG 
nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:b3:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '21310d90-efbc-45a8-a97f-c4358606530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bac5b122-e2ed-43de-a080-d3ef3f0aa2c8', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1237.861592] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Creating folder: Project (fc915fe2f841475892db299b77a09d34). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1237.865296] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f07ee34-6f42-4af3-abdd-b49d838c2d78 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.874388] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Created folder: Project (fc915fe2f841475892db299b77a09d34) in parent group-v834492. [ 1237.874567] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Creating folder: Instances. Parent ref: group-v834559. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1237.875164] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f71fa9d9-6663-40f1-a477-d23d034cc12d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.883955] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Created folder: Instances in parent group-v834559. [ 1237.884209] env[68798]: DEBUG oslo.service.loopingcall [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1237.884392] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1237.884584] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b90f7d85-ed9a-4717-8ad4-bd1ccb9f5555 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.904161] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1237.904161] env[68798]: value = "task-4217621" [ 1237.904161] env[68798]: _type = "Task" [ 1237.904161] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.912693] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217621, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.414590] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217621, 'name': CreateVM_Task, 'duration_secs': 0.478592} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.414952] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1238.422139] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.422193] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.422544] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1238.422831] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfab1f56-8778-4172-8180-d95710094a3b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.428513] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for the task: (returnval){ [ 1238.428513] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f47e27-de7d-46b1-6080-6b0bd2c0e21c" [ 1238.428513] env[68798]: _type = "Task" [ 1238.428513] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.437826] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f47e27-de7d-46b1-6080-6b0bd2c0e21c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.939504] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.939504] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1238.939703] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.416329] env[68798]: DEBUG nova.compute.manager [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Received event network-changed-bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1239.416558] env[68798]: DEBUG nova.compute.manager [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Refreshing instance network info cache due to event network-changed-bac5b122-e2ed-43de-a080-d3ef3f0aa2c8. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1239.416784] env[68798]: DEBUG oslo_concurrency.lockutils [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] Acquiring lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.416859] env[68798]: DEBUG oslo_concurrency.lockutils [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] Acquired lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.417018] env[68798]: DEBUG nova.network.neutron [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Refreshing network info cache for port bac5b122-e2ed-43de-a080-d3ef3f0aa2c8 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1239.735998] env[68798]: DEBUG nova.network.neutron [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updated VIF entry in instance network info cache for port bac5b122-e2ed-43de-a080-d3ef3f0aa2c8. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1239.736356] env[68798]: DEBUG nova.network.neutron [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updating instance_info_cache with network_info: [{"id": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "address": "fa:16:3e:fe:b3:81", "network": {"id": "65092b3b-364d-4fac-86a4-e2bbbb781e83", "bridge": "br-int", "label": "tempest-ServersTestJSON-684731779-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc915fe2f841475892db299b77a09d34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "21310d90-efbc-45a8-a97f-c4358606530f", "external-id": "nsx-vlan-transportzone-672", "segmentation_id": 672, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbac5b122-e2", "ovs_interfaceid": "bac5b122-e2ed-43de-a080-d3ef3f0aa2c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.750465] env[68798]: DEBUG oslo_concurrency.lockutils [req-21c8c86b-b721-4310-99a9-4f0da1b1fb97 req-4b9a3d81-1197-422a-b375-b9fadc40b849 service nova] Releasing lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.327736] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "17cce398-d2f8-47a6-b714-c4e54caec516" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.327975] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.496590] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "ca976c34-4eb0-46aa-a243-91401f842c32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.496907] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock 
"ca976c34-4eb0-46aa-a243-91401f842c32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.539410] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8ba38e10-c6ac-4617-acbe-fa6c10cc753b tempest-ServerActionsTestOtherA-140659728 tempest-ServerActionsTestOtherA-140659728-project-member] Acquiring lock "6cf9a284-56a7-4780-b7a1-fedf77f8231a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.539410] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8ba38e10-c6ac-4617-acbe-fa6c10cc753b tempest-ServerActionsTestOtherA-140659728 tempest-ServerActionsTestOtherA-140659728-project-member] Lock "6cf9a284-56a7-4780-b7a1-fedf77f8231a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.307697] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3aa5332d-f931-4dbf-b50f-23803f72ffb5 tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Acquiring lock "802f2573-8a44-489d-a0a0-32ca69dc6281" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.307967] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3aa5332d-f931-4dbf-b50f-23803f72ffb5 tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Lock "802f2573-8a44-489d-a0a0-32ca69dc6281" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1255.014175] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b1df3437-ff84-4052-9da2-237214c20a03 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Acquiring lock "19702e1f-2d11-492c-9e9e-067d1aa2b6a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1255.014558] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b1df3437-ff84-4052-9da2-237214c20a03 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "19702e1f-2d11-492c-9e9e-067d1aa2b6a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.631530] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2dd0e14c-d042-44b2-8e8b-1f56e3c72134 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "e42bca43-6e9e-49d5-8cbd-4c57e5f0123b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.631530] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2dd0e14c-d042-44b2-8e8b-1f56e3c72134 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "e42bca43-6e9e-49d5-8cbd-4c57e5f0123b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.948732] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Acquiring lock "de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.949100] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.979166] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Acquiring lock "17199829-f72a-4ba6-93b4-da057f00bbc7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.979388] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "17199829-f72a-4ba6-93b4-da057f00bbc7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.560129] env[68798]: WARNING oslo_vmware.rw_handles [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1280.560129] 
env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1280.560129] env[68798]: ERROR oslo_vmware.rw_handles [ 1280.564019] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1280.564019] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1280.564736] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Copying Virtual Disk [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/4b8aadba-3865-4dbd-9c86-504617ae40b8/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1280.565249] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fdb089cd-59f1-451a-a25d-97d197223f64 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.575617] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for the task: (returnval){ [ 1280.575617] env[68798]: value = "task-4217622" [ 1280.575617] env[68798]: _type = "Task" [ 1280.575617] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.588960] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Task: {'id': task-4217622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.087447] env[68798]: DEBUG oslo_vmware.exceptions [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1281.087754] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.092162] env[68798]: ERROR nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.092162] env[68798]: Faults: ['InvalidArgument'] [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] yield resources [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.driver.spawn(context, instance, image_meta, [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._fetch_image_if_missing(context, vi) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] image_cache(vi, tmp_image_ds_loc) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] vm_util.copy_virtual_disk( [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] session._wait_for_task(vmdk_copy_task) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.wait_for_task(task_ref) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return evt.wait() [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] result = hub.switch() [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.greenlet.switch() [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.f(*self.args, **self.kw) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exceptions.translate_fault(task_info.error) [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Faults: ['InvalidArgument'] [ 1281.092162] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1281.093545] env[68798]: INFO nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Terminating instance [ 1281.094341] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.094558] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.095250] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa 
tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1281.095453] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1281.095718] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a3c3cbe-23e3-4edf-b405-ea71fa3c0f99 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.098266] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff051f0-4189-40eb-9798-bf9a3c8f9c70 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.106101] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1281.106358] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19ae56a6-27db-4875-9fca-72248e95f331 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.109216] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.109365] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1281.110401] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4aedd8e-1c87-4d8c-95a9-f70dd062b736 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.116298] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for the task: (returnval){ [ 1281.116298] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52be09e2-4226-5795-8b71-4effcdf6a6fb" [ 1281.116298] env[68798]: _type = "Task" [ 1281.116298] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.127267] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52be09e2-4226-5795-8b71-4effcdf6a6fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.186990] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1281.187253] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1281.187435] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Deleting the datastore file [datastore1] 89f660c8-6efd-4789-90ee-67e42abc1db7 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.187749] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f6423d4-d519-4325-b2c1-550baecbc8b3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.194962] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for the task: (returnval){ [ 1281.194962] env[68798]: value = "task-4217624" [ 1281.194962] env[68798]: _type = "Task" [ 1281.194962] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.204478] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Task: {'id': task-4217624, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.627895] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1281.628176] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Creating directory with path [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.628411] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8390af9c-ac74-4fbe-91bc-ad73a5b3c367 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.641610] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Created directory with path [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.641852] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Fetch image to [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1281.641994] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1281.642844] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b478ce-bc42-4ba6-913b-1b0730f59271 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.651130] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08eac96-10c7-4f86-af63-c4f43d3f9ea0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.660436] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ca7cb9-6622-43d9-982c-8713cd854b43 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.693091] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1590f9ff-700c-4b25-9bcb-f3e3a5c545af {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.704168] env[68798]: DEBUG oslo_vmware.api [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Task: {'id': task-4217624, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071678} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.705721] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.705964] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1281.706112] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1281.706291] env[68798]: INFO nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Took 0.61 seconds to destroy the instance on the hypervisor. 
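The traceback above shows the spawn failing inside nova.virt.vmwareapi.vm_util.copy_virtual_disk: oslo_vmware's wait_for_task polls the CopyVirtualDisk_Task and re-raises the vCenter fault as a VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']), after which the half-built instance is unregistered and its datastore files deleted. As a minimal sketch of that invoke-and-wait pattern (illustrative only: the session object, datacenter reference, datastore paths and the helper name below are placeholders, not code from this deployment), the call sequence is roughly:

    # Illustrative sketch of the invoke-and-wait pattern seen in the traceback
    # above. `session` stands in for the driver's existing oslo_vmware
    # VMwareAPISession; dc_ref and the paths are placeholders.
    from oslo_vmware import exceptions as vexc

    def copy_sparse_image(session, dc_ref, src_path, dst_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Start the server-side copy; vCenter returns a Task managed object.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=src_path, sourceDatacenter=dc_ref,
                                  destName=dst_path, destDatacenter=dc_ref)
        try:
            # wait_for_task polls the task (the "progress is 0%" records above)
            # and translates a failed task's fault into an exception.
            return session.wait_for_task(task)
        except vexc.VimFaultException:
            # The fault names, e.g. ['InvalidArgument'], ride along on the
            # exception's fault list, which is what the log prints above.
            raise

In the log, this exception aborts _fetch_image_if_missing, so the compute manager immediately destroys the instance and, just below, aborts its resource claim.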
[ 1281.708433] env[68798]: DEBUG nova.compute.claims [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1281.709029] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.709029] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.713509] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d47bef09-b9d5-4866-b44a-a642c0a4cdc0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.736326] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1281.764408] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6996578d-8fa5-4660-82e7-18a110b7045e tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Acquiring lock "dcced840-b57b-47bd-8d7b-bfe971290659" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.764408] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6996578d-8fa5-4660-82e7-18a110b7045e tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Lock "dcced840-b57b-47bd-8d7b-bfe971290659" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.992073] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1282.059624] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1282.059624] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1282.152314] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14acd5b3-ba59-4cac-a813-60f7bfdb90c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.160032] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0178f1-5bb3-4ae2-922e-6d49e46c10c9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.194031] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99066067-fc8e-4f3d-8c8e-1a73b9f4c768 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.202658] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae45fee0-893e-4445-893d-4f8116d904c9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.216645] env[68798]: DEBUG nova.compute.provider_tree [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1282.227356] env[68798]: DEBUG nova.scheduler.client.report [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1282.243778] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.535s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.244363] env[68798]: ERROR nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.244363] env[68798]: Faults: ['InvalidArgument'] [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.driver.spawn(context, instance, image_meta, [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._fetch_image_if_missing(context, vi) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] image_cache(vi, tmp_image_ds_loc) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] vm_util.copy_virtual_disk( [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] session._wait_for_task(vmdk_copy_task) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.wait_for_task(task_ref) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return evt.wait() [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] result = hub.switch() [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.greenlet.switch() [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.f(*self.args, **self.kw) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exceptions.translate_fault(task_info.error) [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Faults: ['InvalidArgument'] [ 1282.244363] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.245224] env[68798]: DEBUG nova.compute.utils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1282.246647] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Build of instance 89f660c8-6efd-4789-90ee-67e42abc1db7 was re-scheduled: A specified parameter was not correct: fileType [ 1282.246647] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1282.247032] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1282.247214] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1282.247368] env[68798]: DEBUG nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1282.247533] env[68798]: DEBUG nova.network.neutron [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1282.428284] env[68798]: DEBUG neutronclient.v2_0.client [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1282.433458] env[68798]: ERROR nova.compute.manager [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.driver.spawn(context, instance, image_meta, [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._fetch_image_if_missing(context, vi) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] image_cache(vi, tmp_image_ds_loc) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] vm_util.copy_virtual_disk( [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] session._wait_for_task(vmdk_copy_task) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.wait_for_task(task_ref) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return evt.wait() [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] result = hub.switch() [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.greenlet.switch() [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.f(*self.args, **self.kw) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exceptions.translate_fault(task_info.error) [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Faults: ['InvalidArgument'] [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] During handling of the above exception, another exception occurred: [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._build_and_run_instance(context, instance, image, [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File 
"/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exception.RescheduledException( [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] nova.exception.RescheduledException: Build of instance 89f660c8-6efd-4789-90ee-67e42abc1db7 was re-scheduled: A specified parameter was not correct: fileType [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Faults: ['InvalidArgument'] [ 1282.433458] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] During handling of the above exception, another exception occurred: [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] exception_handler_v20(status_code, error_body) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise client_exc(message=error_message, [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Neutron server returns request_ids: ['req-05292015-aebe-41c5-93a6-a8d360e735d1'] [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] During handling of the above exception, another exception occurred: [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._deallocate_network(context, instance, requested_networks) [ 1282.434559] env[68798]: ERROR nova.compute.manager 
[instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.network_api.deallocate_for_instance( [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] data = neutron.list_ports(**search_opts) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.list('ports', self.ports_path, retrieve_all, [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] for r in self._pagination(collection, path, **params): [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] res = self.get(path, params=params) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.retry_request("GET", action, body=body, [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1282.434559] env[68798]: ERROR nova.compute.manager [instance: 
89f660c8-6efd-4789-90ee-67e42abc1db7] return self.do_request(method, action, body=body, [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._handle_fault_response(status_code, replybody, resp) [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exception.Unauthorized() [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] nova.exception.Unauthorized: Not authorized. [ 1282.435702] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.509298] env[68798]: INFO nova.scheduler.client.report [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Deleted allocations for instance 89f660c8-6efd-4789-90ee-67e42abc1db7 [ 1282.537030] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a759d608-5cd9-4d67-8917-f3ae5bf2acaa tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 641.230s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.537030] env[68798]: DEBUG oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 442.994s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.537030] env[68798]: DEBUG oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Acquiring lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.537030] env[68798]: DEBUG oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.537030] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.538979] env[68798]: INFO nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Terminating instance [ 1282.541181] env[68798]: DEBUG nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1282.541732] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1282.542527] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee1e89db-fafd-47e1-8c65-699f5ca47fef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.554384] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5484331-4242-426f-b4f5-2cebc0d26d06 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.569085] env[68798]: DEBUG nova.compute.manager [None req-0420805a-e4d1-461e-ae95-bed495d2f593 tempest-ImagesOneServerNegativeTestJSON-1064251384 tempest-ImagesOneServerNegativeTestJSON-1064251384-project-member] [instance: 6669c663-e5ca-4257-b7aa-f694b12f91d4] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.593461] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 89f660c8-6efd-4789-90ee-67e42abc1db7 could not be found. [ 1282.594132] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1282.595106] env[68798]: INFO nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Took 0.05 seconds to destroy the instance on the hypervisor. 
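The oslo_concurrency.lockutils records above ("Acquiring lock ... / acquired ... :: waited ... / released ... :: held ...") come from the lock wrappers that serialize per-instance operations such as _locked_do_build_and_run_instance and do_terminate_instance. A minimal sketch of that pattern, with a placeholder lock name rather than this instance's UUID, looks like:

    # Minimal sketch of the oslo.concurrency locking pattern behind the
    # acquire/wait/held records above. The lock name is a placeholder.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-uuid-placeholder')
    def _locked_do_build_and_run_instance():
        # Runs with the named lock held; lockutils logs how long the caller
        # waited for the lock and, on exit, how long it was held.
        pass

In this run the build lock for 89f660c8-6efd-4789-90ee-67e42abc1db7 was held for 641.230s before being released, which is why the terminate path above reports having waited 442.994s for the same lock.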
[ 1282.596839] env[68798]: DEBUG oslo.service.loopingcall [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1282.596839] env[68798]: DEBUG nova.compute.manager [-] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1282.596839] env[68798]: DEBUG nova.network.neutron [-] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1282.604562] env[68798]: DEBUG nova.compute.manager [None req-0420805a-e4d1-461e-ae95-bed495d2f593 tempest-ImagesOneServerNegativeTestJSON-1064251384 tempest-ImagesOneServerNegativeTestJSON-1064251384-project-member] [instance: 6669c663-e5ca-4257-b7aa-f694b12f91d4] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.640033] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0420805a-e4d1-461e-ae95-bed495d2f593 tempest-ImagesOneServerNegativeTestJSON-1064251384 tempest-ImagesOneServerNegativeTestJSON-1064251384-project-member] Lock "6669c663-e5ca-4257-b7aa-f694b12f91d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.709s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.653532] env[68798]: DEBUG nova.compute.manager [None req-bd15a999-fe5f-4609-8f60-18101b2be48e tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] [instance: addf38a1-9a3f-4e4f-ae0a-011fa96b344a] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.689830] env[68798]: DEBUG nova.compute.manager [None req-bd15a999-fe5f-4609-8f60-18101b2be48e tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] [instance: addf38a1-9a3f-4e4f-ae0a-011fa96b344a] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.718689] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd15a999-fe5f-4609-8f60-18101b2be48e tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Lock "addf38a1-9a3f-4e4f-ae0a-011fa96b344a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.061s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.734174] env[68798]: DEBUG nova.compute.manager [None req-93b54225-d1e4-4e32-a08d-1cc1974965f4 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: be9d913d-aeb6-4ae9-baca-d1733e9e5734] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.744205] env[68798]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1282.744472] env[68798]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-fd319c58-fca4-4695-b3e1-19e1367ca279'] [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1282.745225] env[68798]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1282.745225] env[68798]: ERROR oslo.service.loopingcall [ 1282.747286] env[68798]: ERROR nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1282.768757] env[68798]: DEBUG nova.compute.manager [None req-93b54225-d1e4-4e32-a08d-1cc1974965f4 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: be9d913d-aeb6-4ae9-baca-d1733e9e5734] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.790024] env[68798]: ERROR nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] exception_handler_v20(status_code, error_body) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise client_exc(message=error_message, [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Neutron server returns request_ids: ['req-fd319c58-fca4-4695-b3e1-19e1367ca279'] [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] During handling of the above exception, another exception occurred: [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Traceback (most recent call last): [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 
89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._delete_instance(context, instance, bdms) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._shutdown_instance(context, instance, bdms) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._try_deallocate_network(context, instance, requested_networks) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] with excutils.save_and_reraise_exception(): [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.force_reraise() [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise self.value [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] _deallocate_network_with_retries() [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return evt.wait() [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] result = hub.switch() [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.greenlet.switch() [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 
89f660c8-6efd-4789-90ee-67e42abc1db7] result = func(*self.args, **self.kw) [ 1282.790024] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] result = f(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._deallocate_network( [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self.network_api.deallocate_for_instance( [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] data = neutron.list_ports(**search_opts) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.list('ports', self.ports_path, retrieve_all, [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] for r in self._pagination(collection, path, **params): [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] res = self.get(path, params=params) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.retry_request("GET", action, body=body, [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] return self.do_request(method, action, body=body, [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] ret = obj(*args, **kwargs) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] self._handle_fault_response(status_code, replybody, resp) [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1282.791193] env[68798]: ERROR nova.compute.manager [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] [ 1282.791193] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1282.793985] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93b54225-d1e4-4e32-a08d-1cc1974965f4 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "be9d913d-aeb6-4ae9-baca-d1733e9e5734" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.957s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.805266] env[68798]: DEBUG nova.compute.manager [None req-4a2e845b-7987-4555-af95-45b421417b0a tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 78d16017-fa1f-4d77-9111-55d37a1463d3] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.833490] env[68798]: DEBUG oslo_concurrency.lockutils [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.298s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.835535] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 107.327s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.835841] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] During sync_power_state the instance has a pending task (deleting). Skip. [ 1282.836085] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "89f660c8-6efd-4789-90ee-67e42abc1db7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.844737] env[68798]: DEBUG nova.compute.manager [None req-4a2e845b-7987-4555-af95-45b421417b0a tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 78d16017-fa1f-4d77-9111-55d37a1463d3] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.873873] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4a2e845b-7987-4555-af95-45b421417b0a tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "78d16017-fa1f-4d77-9111-55d37a1463d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.470s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.884615] env[68798]: DEBUG nova.compute.manager [None req-8daa0e20-cebc-4e22-84cf-b29ac98ed8db tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 8f21bf87-c969-40dc-bbdc-9b9c0302b3ee] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.915266] env[68798]: INFO nova.compute.manager [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] [instance: 89f660c8-6efd-4789-90ee-67e42abc1db7] Successfully reverted task state from None on failure for instance. [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server [None req-002969ef-b15a-4958-94e3-19b8c9c7b596 tempest-ServerExternalEventsTest-1021236582 tempest-ServerExternalEventsTest-1021236582-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-fd319c58-fca4-4695-b3e1-19e1367ca279'] [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1282.920479] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.922207] env[68798]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1282.922207] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1282.928336] env[68798]: ERROR oslo_messaging.rpc.server [ 1282.928336] env[68798]: DEBUG nova.compute.manager [None req-8daa0e20-cebc-4e22-84cf-b29ac98ed8db tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 8f21bf87-c969-40dc-bbdc-9b9c0302b3ee] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1282.947948] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8daa0e20-cebc-4e22-84cf-b29ac98ed8db tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "8f21bf87-c969-40dc-bbdc-9b9c0302b3ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.994s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1282.960754] env[68798]: DEBUG nova.compute.manager [None req-92993540-b01d-4bad-a7fc-8bc797bb49d6 tempest-ServerMetadataTestJSON-1279570147 tempest-ServerMetadataTestJSON-1279570147-project-member] [instance: 6d29d382-0b65-4cce-a487-1e4096cb4907] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1282.989548] env[68798]: DEBUG nova.compute.manager [None req-92993540-b01d-4bad-a7fc-8bc797bb49d6 tempest-ServerMetadataTestJSON-1279570147 tempest-ServerMetadataTestJSON-1279570147-project-member] [instance: 6d29d382-0b65-4cce-a487-1e4096cb4907] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1283.012148] env[68798]: DEBUG oslo_concurrency.lockutils [None req-92993540-b01d-4bad-a7fc-8bc797bb49d6 tempest-ServerMetadataTestJSON-1279570147 tempest-ServerMetadataTestJSON-1279570147-project-member] Lock "6d29d382-0b65-4cce-a487-1e4096cb4907" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.039s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.025438] env[68798]: DEBUG nova.compute.manager [None req-0be1bef0-aa28-4213-a1fc-f04876a1f07c tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: 1a5e8081-1a86-4c91-8139-469b7825fc47] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.043421] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.083535] env[68798]: DEBUG nova.compute.manager [None req-0be1bef0-aa28-4213-a1fc-f04876a1f07c tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] [instance: 1a5e8081-1a86-4c91-8139-469b7825fc47] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1283.113017] env[68798]: DEBUG oslo_concurrency.lockutils [None req-0be1bef0-aa28-4213-a1fc-f04876a1f07c tempest-AttachInterfacesTestJSON-1343652290 tempest-AttachInterfacesTestJSON-1343652290-project-member] Lock "1a5e8081-1a86-4c91-8139-469b7825fc47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.761s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.124821] env[68798]: DEBUG nova.compute.manager [None req-5f0e11eb-4ce2-48f2-ad45-55026832b1c4 tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] [instance: 66a828dc-4a00-49d8-944a-0c8a90d56219] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.160518] env[68798]: DEBUG nova.compute.manager [None req-5f0e11eb-4ce2-48f2-ad45-55026832b1c4 tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] [instance: 66a828dc-4a00-49d8-944a-0c8a90d56219] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1283.186272] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5f0e11eb-4ce2-48f2-ad45-55026832b1c4 tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Lock "66a828dc-4a00-49d8-944a-0c8a90d56219" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.978s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.202668] env[68798]: DEBUG nova.compute.manager [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: a4376aa4-f675-42e8-a908-a398ab8db455] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.241681] env[68798]: DEBUG nova.compute.manager [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: a4376aa4-f675-42e8-a908-a398ab8db455] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1283.297020] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "a4376aa4-f675-42e8-a908-a398ab8db455" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.069s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.324151] env[68798]: DEBUG nova.compute.manager [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.351357] env[68798]: DEBUG nova.compute.manager [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1283.380656] env[68798]: DEBUG oslo_concurrency.lockutils [None req-93be4323-1ede-4d85-ad9a-7c67057d49ce tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "dab58ca2-ba8a-4f7d-acdd-dbb94b38ffbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.125s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.396489] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1283.478254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.478254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.479423] env[68798]: INFO nova.compute.claims [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1283.886267] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65b7e4a-b41c-4b5b-923a-719914c842bf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.895289] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d4beb0-ea01-4a8d-8616-7a4e2401d014 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.927551] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f49a30e-cd70-484b-bc99-70d564c1de39 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.937096] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b90b2f7-651d-4671-8f54-d41891865be0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1283.954022] env[68798]: DEBUG nova.compute.provider_tree [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1283.965906] env[68798]: DEBUG nova.scheduler.client.report [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1283.981371] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.503s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.981842] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1284.028178] env[68798]: DEBUG nova.compute.utils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1284.029717] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1284.029952] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1284.042198] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1284.047666] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.048114] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1284.048114] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1284.083714] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.083938] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084069] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084255] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084642] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084642] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084817] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.084885] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.085057] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.085282] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1284.085961] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1284.086295] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.096108] env[68798]: DEBUG nova.policy [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '757fa295b19c45f19779644a7a98a2f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c031c59cfa314d4385e63191b316f9b9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1284.118889] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1284.148792] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1284.149069] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1284.149313] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1284.149441] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1284.149572] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1284.149725] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1284.150009] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1284.150252] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1284.150445] env[68798]: DEBUG nova.virt.hardware [None 
req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1284.150650] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1284.150859] env[68798]: DEBUG nova.virt.hardware [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1284.151780] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707b4203-010c-428f-9176-f8ff62c828d7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.161798] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91b222b-9c94-4377-9f88-d6da5e2e8571 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.515045] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Successfully created port: a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1285.049296] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1285.366763] env[68798]: DEBUG nova.compute.manager [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Received event network-vif-plugged-a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1285.367016] env[68798]: DEBUG oslo_concurrency.lockutils [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] Acquiring lock "da5d9023-f6c1-44f8-9465-36aa2b109924-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.367250] env[68798]: DEBUG oslo_concurrency.lockutils [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.367439] env[68798]: DEBUG oslo_concurrency.lockutils [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] Lock 
"da5d9023-f6c1-44f8-9465-36aa2b109924-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1285.367727] env[68798]: DEBUG nova.compute.manager [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] No waiting events found dispatching network-vif-plugged-a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1285.367974] env[68798]: WARNING nova.compute.manager [req-b6c5109d-d142-459b-aa5a-0f0adca355e2 req-18e6cc97-b58e-44f0-98f7-293546a94215 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Received unexpected event network-vif-plugged-a76fa84a-dae4-4349-b225-3ea90aa6984f for instance with vm_state building and task_state spawning. [ 1285.473991] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Successfully updated port: a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1285.491898] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1285.492251] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquired lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.492251] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1285.554610] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1285.823243] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Updating instance_info_cache with network_info: [{"id": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "address": "fa:16:3e:23:45:48", "network": {"id": "11db627b-610b-4436-a9bf-a3bfb2ca764f", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-837000134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c031c59cfa314d4385e63191b316f9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa76fa84a-da", "ovs_interfaceid": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.841183] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Releasing lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1285.841745] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance network_info: |[{"id": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "address": "fa:16:3e:23:45:48", "network": {"id": "11db627b-610b-4436-a9bf-a3bfb2ca764f", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-837000134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c031c59cfa314d4385e63191b316f9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa76fa84a-da", "ovs_interfaceid": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1285.842287] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:45:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a76fa84a-dae4-4349-b225-3ea90aa6984f', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.849291] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Creating folder: Project (c031c59cfa314d4385e63191b316f9b9). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1285.849882] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-10d02d55-ee49-4c66-b19d-992a2ddb0f29 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.862494] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Created folder: Project (c031c59cfa314d4385e63191b316f9b9) in parent group-v834492. [ 1285.863232] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Creating folder: Instances. Parent ref: group-v834562. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1285.863232] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b1e186f-38da-4566-a3d1-be86c4bc4809 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.877552] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Created folder: Instances in parent group-v834562. [ 1285.877825] env[68798]: DEBUG oslo.service.loopingcall [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1285.878047] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1285.878277] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d47f1af0-dea2-4b49-86e1-594be9a8b57b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.899615] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.899615] env[68798]: value = "task-4217627" [ 1285.899615] env[68798]: _type = "Task" [ 1285.899615] env[68798]: } to complete. 
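The CreateVM_Task invocation and the "Waiting for the task ... to complete" / "progress is 0%" lines above follow the usual oslo.vmware pattern: invoke a vSphere method through the session, get back a Task moref, then block in wait_for_task while _poll_task reports progress. A rough sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and the folder/pool/config values below are placeholders, not this deployment's:

    # Rough sketch of the invoke-then-wait pattern shown above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        "vcenter.example.test",            # placeholder host
        "administrator@vsphere.local",     # placeholder user
        "secret",                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # interval behind "... progress is N%"

    # Placeholders: in the log these come from the preceding
    # PropertyCollector.RetrievePropertiesEx lookups and the VIF/flavor data.
    vm_folder = res_pool = config_spec = None

    # invoke_api() issues the SOAP call ("Invoking Folder.CreateVM_Task ...")
    # and returns a Task moref such as task-4217627.
    task_ref = session.invoke_api(
        session.vim, "CreateVM_Task", vm_folder,
        config=config_spec, pool=res_pool)

    # wait_for_task() blocks, polling every task_poll_interval seconds, and
    # returns the task info on success or raises on a vSphere fault.
    task_info = session.wait_for_task(task_ref)
    print(task_info.result)                # the created VirtualMachine moref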
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.909384] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217627, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.048330] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.048576] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1286.048725] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1286.411347] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217627, 'name': CreateVM_Task, 'duration_secs': 0.30999} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.411711] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1286.412272] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1286.412426] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.412756] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1286.413015] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bde5408-7e74-4b3b-bf62-86a8ed2f29ac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.419728] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for the task: (returnval){ [ 1286.419728] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5248bc08-b661-0916-232f-5377d4c9e674" [ 1286.419728] env[68798]: _type = "Task" [ 1286.419728] env[68798]: } to 
complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.432255] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5248bc08-b661-0916-232f-5377d4c9e674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.931952] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1286.932261] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.932492] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.049276] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.402108] env[68798]: DEBUG nova.compute.manager [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Received event network-changed-a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1287.402108] env[68798]: DEBUG nova.compute.manager [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Refreshing instance network info cache due to event network-changed-a76fa84a-dae4-4349-b225-3ea90aa6984f. 
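The Acquiring/Acquired/Releasing triples around "refresh_cache-..." and "[datastore1] devstack-image-cache_base/..." in this block are oslo.concurrency locks keyed by plain strings, so every request touching the same cached VMDK or the same instance cache serializes on the same name. A minimal sketch of the two usual forms, with lock names copied from the log:

    # Minimal sketch of the two oslo.concurrency locking forms whose DEBUG
    # output appears throughout this block.
    from oslo_concurrency import lockutils

    IMAGE_CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
                        "659c1b0c-65c8-46ab-93ff-5947bb8f4c70")

    # Context-manager form: logs the Acquiring/Acquired/Releasing lines
    # (lockutils.lock) while the block holds the lock.
    with lockutils.lock(IMAGE_CACHE_LOCK):
        pass  # e.g. check or populate the cached VMDK

    # Decorator form: logs the 'acquired by ... waited/held' lines
    # (lockutils "inner") and serializes callers sharing the same name.
    @lockutils.synchronized("refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924")
    def refresh_network_cache():
        pass  # e.g. rebuild the instance network info cache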
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1287.402108] env[68798]: DEBUG oslo_concurrency.lockutils [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] Acquiring lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.402108] env[68798]: DEBUG oslo_concurrency.lockutils [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] Acquired lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.402108] env[68798]: DEBUG nova.network.neutron [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Refreshing network info cache for port a76fa84a-dae4-4349-b225-3ea90aa6984f {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1287.833625] env[68798]: DEBUG nova.network.neutron [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Updated VIF entry in instance network info cache for port a76fa84a-dae4-4349-b225-3ea90aa6984f. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1287.833996] env[68798]: DEBUG nova.network.neutron [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Updating instance_info_cache with network_info: [{"id": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "address": "fa:16:3e:23:45:48", "network": {"id": "11db627b-610b-4436-a9bf-a3bfb2ca764f", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-837000134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c031c59cfa314d4385e63191b316f9b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa76fa84a-da", "ovs_interfaceid": "a76fa84a-dae4-4349-b225-3ea90aa6984f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.847512] env[68798]: DEBUG oslo_concurrency.lockutils [req-8c9b8633-4813-4050-9d6b-40f97eb7a964 req-92134654-b602-4fa3-9df0-a4dda0cf7831 service nova] Releasing lock "refresh_cache-da5d9023-f6c1-44f8-9465-36aa2b109924" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.644554] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 
tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "36980008-f639-4c88-afcf-0dba40420b87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.644554] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.046941] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.085617] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1290.100836] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.101128] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.101309] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1290.101467] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1290.103454] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99b82b5-de78-4085-ad9e-e8767461950b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.115494] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3057b3c5-ee72-4362-8fb5-9570f4d6e5a1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.136513] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f2f584-23fc-4345-afcd-9a4b58a3c8b8 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.147228] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56013be0-8b87-4d96-b139-fbea26bcb189 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.185289] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180762MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1290.185289] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1290.185446] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1290.271051] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271147] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271269] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271395] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271530] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271651] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271768] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.271884] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.272007] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.272172] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1290.287716] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a2cee0f3-08b3-4a25-9d5e-9760604ff948 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.302034] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.314698] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.328000] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.340375] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.352772] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf9a284-56a7-4780-b7a1-fedf77f8231a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.366028] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 802f2573-8a44-489d-a0a0-32ca69dc6281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.382465] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 19702e1f-2d11-492c-9e9e-067d1aa2b6a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.395793] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e42bca43-6e9e-49d5-8cbd-4c57e5f0123b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.410460] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.426098] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17199829-f72a-4ba6-93b4-da057f00bbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.438196] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dcced840-b57b-47bd-8d7b-bfe971290659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.451425] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.451757] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1290.451913] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1290.787932] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b809fd-3a9f-44ae-815a-22a254dd47d8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.797585] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281b7965-3ffc-400c-b99c-089427e16b86 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.829851] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b77ea18-2628-4882-bebd-730689819702 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.837935] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18851849-ccfe-4b10-b171-4b3c92c79c3d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.852949] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 
855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.863157] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1290.878670] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1290.878871] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.693s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.394985] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "da5d9023-f6c1-44f8-9465-36aa2b109924" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.302455] env[68798]: DEBUG oslo_concurrency.lockutils [None req-707a0910-729f-467a-832a-0b7cd7e429f1 tempest-InstanceActionsV221TestJSON-115434527 tempest-InstanceActionsV221TestJSON-115434527-project-member] Acquiring lock "1a5de688-91c2-4197-a396-c0df71fdbeda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.302754] env[68798]: DEBUG oslo_concurrency.lockutils [None req-707a0910-729f-467a-832a-0b7cd7e429f1 tempest-InstanceActionsV221TestJSON-115434527 tempest-InstanceActionsV221TestJSON-115434527-project-member] Lock "1a5de688-91c2-4197-a396-c0df71fdbeda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.057940] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d83a0c3d-3e8f-4aac-9666-cb3ba610c52f tempest-ServerShowV254Test-1493978018 tempest-ServerShowV254Test-1493978018-project-member] Acquiring lock "c142b101-e8b0-4073-9079-5c9730eac176" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.058398] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d83a0c3d-3e8f-4aac-9666-cb3ba610c52f tempest-ServerShowV254Test-1493978018 tempest-ServerShowV254Test-1493978018-project-member] Lock 
"c142b101-e8b0-4073-9079-5c9730eac176" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.409346] env[68798]: WARNING oslo_vmware.rw_handles [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1329.409346] env[68798]: ERROR oslo_vmware.rw_handles [ 1329.409988] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1329.411793] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1329.412064] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Copying Virtual Disk [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/cffb6a90-9f4e-4f3f-85b4-9946f0120baf/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1329.412380] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4482b40-1af5-4895-9d6d-155675e0f302 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.420810] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for the task: (returnval){ [ 1329.420810] env[68798]: value = "task-4217628" [ 1329.420810] env[68798]: _type = "Task" [ 1329.420810] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.429158] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Task: {'id': task-4217628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.931880] env[68798]: DEBUG oslo_vmware.exceptions [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1329.932129] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.932721] env[68798]: ERROR nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1329.932721] env[68798]: Faults: ['InvalidArgument'] [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Traceback (most recent call last): [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] yield resources [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self.driver.spawn(context, instance, image_meta, [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1329.932721] env[68798]: ERROR 
nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self._fetch_image_if_missing(context, vi) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] image_cache(vi, tmp_image_ds_loc) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] vm_util.copy_virtual_disk( [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] session._wait_for_task(vmdk_copy_task) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return self.wait_for_task(task_ref) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return evt.wait() [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] result = hub.switch() [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return self.greenlet.switch() [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self.f(*self.args, **self.kw) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] raise exceptions.translate_fault(task_info.error) [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Faults: ['InvalidArgument'] [ 1329.932721] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] [ 1329.933562] 
env[68798]: INFO nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Terminating instance [ 1329.934673] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.934919] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1329.935187] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14829871-b0ff-4523-a143-c05a53504087 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.937610] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1329.937839] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1329.938617] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369cc9c8-386b-4a3e-8940-c08a45fdb7c5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.946421] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1329.946684] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e7dda7f-189c-4ac5-b765-86af127a34d5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.949063] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1329.949238] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 
tempest-DeleteServersAdminTestJSON-1445962786-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1329.950225] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdb778a1-0b2a-48f5-9860-392103fa5e60 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.955738] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 1329.955738] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5235e6bc-a6cc-6dc4-6ba9-a9ebc390a378" [ 1329.955738] env[68798]: _type = "Task" [ 1329.955738] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.963309] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5235e6bc-a6cc-6dc4-6ba9-a9ebc390a378, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.025404] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1330.025794] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1330.025896] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Deleting the datastore file [datastore1] 9923a3c7-f090-4a01-8c57-36c8c22c6b14 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.026274] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8766bfd-0b94-4f6f-a40d-09e21c9366cc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.032812] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for the task: (returnval){ [ 1330.032812] env[68798]: value = "task-4217630" [ 1330.032812] env[68798]: _type = "Task" [ 1330.032812] env[68798]: } to complete. 
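The spawn failure above surfaces as oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised out of the CopyVirtualDisk_Task wait, after which the compute manager terminates the half-built instance. A hedged sketch of trapping that fault class around a task wait; the session and task arguments are assumed inputs:

    # Sketch only: `session` is assumed to be an oslo_vmware VMwareAPISession
    # and `copy_task` a CopyVirtualDisk_Task moref.
    from oslo_vmware import exceptions as vexc

    def wait_for_disk_copy(session, copy_task):
        try:
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as err:
            # err.fault_list holds the vSphere fault names, e.g.
            # ['InvalidArgument']; the message carries the detail
            # ("A specified parameter was not correct: fileType").
            if "InvalidArgument" in err.fault_list:
                pass  # hypothetical spot for cleaning up the partial image copy
            raise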
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.040903] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Task: {'id': task-4217630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.466136] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1330.466470] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Creating directory with path [datastore1] vmware_temp/d3fcda4e-dfe5-433d-a884-fd579a629e91/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.466605] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b23358a-f359-4b58-904c-af9ecf9fc033 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.479115] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Created directory with path [datastore1] vmware_temp/d3fcda4e-dfe5-433d-a884-fd579a629e91/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.479338] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Fetch image to [datastore1] vmware_temp/d3fcda4e-dfe5-433d-a884-fd579a629e91/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1330.479518] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/d3fcda4e-dfe5-433d-a884-fd579a629e91/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1330.480327] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71bcb1b3-8fbb-47dc-8ea5-300d3f7be6be {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.487702] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837ed02d-2925-4880-96b2-c5d67dc5b799 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1330.498236] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab503dc-1600-47fd-a062-c451533ee90d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.529273] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53eb74c-e203-4a1c-aa44-52f8a853aa6a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.537884] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-15b3acc0-29c9-44bc-852d-af90f03e5f49 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.542189] env[68798]: DEBUG oslo_vmware.api [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Task: {'id': task-4217630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075152} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.542752] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1330.542998] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1330.543215] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1330.543407] env[68798]: INFO nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1330.545547] env[68798]: DEBUG nova.compute.claims [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1330.545741] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.545978] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.563712] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1330.709267] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.711092] env[68798]: ERROR nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] yield resources [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.driver.spawn(context, instance, image_meta, [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._fetch_image_if_missing(context, vi) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image_fetch(context, vi, tmp_image_ds_loc) [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] images.fetch_image( [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1330.711092] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] metadata = IMAGE_API.get(context, image_ref) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return session.show(context, image_id, [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] _reraise_translated_image_exception(image_id) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise new_exc.with_traceback(exc_trace) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1330.712052] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1330.712052] env[68798]: INFO nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Terminating instance [ 1330.712553] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.712553] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.712796] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13d68536-7a4d-4658-aa58-5f7588a04389 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.715661] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1330.715894] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1330.716688] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5253f8b0-ce8f-4263-9473-d269d450f99c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.728043] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1330.728387] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43d35a18-44b5-471b-8876-162efe8e3ad6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.731913] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.732131] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 
tempest-ImagesTestJSON-406590470-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1330.733440] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a9ae65a-8d7b-4db7-b226-1321fdc44a61 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.743979] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1330.743979] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a69a1f-9372-ddc3-94d6-b33e0222ec2c" [ 1330.743979] env[68798]: _type = "Task" [ 1330.743979] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.758198] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1330.758499] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.758769] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce7a5cd2-30e1-4be1-bcdf-40634045c088 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.779835] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.779983] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Fetch image to [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1330.780181] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1330.781045] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17c33b7f-078a-406c-b96d-02661c2c6d0a {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.788816] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651ec3e7-09af-4431-bcdc-ad0945cfe7b0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.803479] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e93186-efe7-4a33-8bea-1b79968dbbf6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.841985] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9e916a-0403-4401-9440-3963c16a3b37 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.844536] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1330.844734] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1330.844911] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleting the datastore file [datastore1] fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.845350] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e8ac77d-341f-4f15-887b-870e85b0e372 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.852930] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-08fd775e-868c-4146-b015-4e53d2130bf0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.854659] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for the task: (returnval){ [ 1330.854659] env[68798]: value = "task-4217632" [ 1330.854659] env[68798]: _type = "Task" [ 1330.854659] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.874846] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1330.932660] env[68798]: DEBUG oslo_vmware.rw_handles [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1330.993966] env[68798]: DEBUG oslo_vmware.rw_handles [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1330.993966] env[68798]: DEBUG oslo_vmware.rw_handles [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1331.017023] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496c26cd-e2ae-4048-961a-8ff63ba48a56 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.023849] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b6b990-8d36-4688-9c5f-cfdf57532f72 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.055199] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f960c1b-e063-4320-bde9-efeec39454e7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.063191] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f16337e-dd85-497d-b085-8deee8fea813 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.077082] env[68798]: DEBUG nova.compute.provider_tree [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.086724] env[68798]: DEBUG nova.scheduler.client.report [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1331.102909] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.557s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.104158] env[68798]: ERROR nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.104158] env[68798]: Faults: ['InvalidArgument'] [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Traceback (most recent call last): [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self.driver.spawn(context, instance, image_meta, [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self._fetch_image_if_missing(context, vi) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] image_cache(vi, tmp_image_ds_loc) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] vm_util.copy_virtual_disk( [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] session._wait_for_task(vmdk_copy_task) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return self.wait_for_task(task_ref) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return evt.wait() [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] result = hub.switch() [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] return self.greenlet.switch() [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] self.f(*self.args, **self.kw) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 
9923a3c7-f090-4a01-8c57-36c8c22c6b14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] raise exceptions.translate_fault(task_info.error) [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Faults: ['InvalidArgument'] [ 1331.104158] env[68798]: ERROR nova.compute.manager [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] [ 1331.104884] env[68798]: DEBUG nova.compute.utils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1331.106190] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Build of instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 was re-scheduled: A specified parameter was not correct: fileType [ 1331.106190] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1331.106621] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1331.106856] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1331.107101] env[68798]: DEBUG nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1331.107312] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1331.366261] env[68798]: DEBUG oslo_vmware.api [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Task: {'id': task-4217632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.08105} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.368577] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1331.368577] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1331.368577] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1331.368577] env[68798]: INFO nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1331.369406] env[68798]: DEBUG nova.compute.claims [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1331.369716] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.370057] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.606191] env[68798]: DEBUG nova.network.neutron [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.625460] env[68798]: INFO nova.compute.manager [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Took 0.52 seconds to deallocate network for instance. 
[ 1331.732365] env[68798]: INFO nova.scheduler.client.report [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Deleted allocations for instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 [ 1331.758685] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f5df27ea-c7a5-4f6e-a231-c201b450a1a0 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 679.242s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.760219] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 480.441s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.760461] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.760752] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.761015] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.765710] env[68798]: INFO nova.compute.manager [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Terminating instance [ 1331.767724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquiring lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.767884] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 
tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Acquired lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.768066] env[68798]: DEBUG nova.network.neutron [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1331.775606] env[68798]: DEBUG nova.compute.manager [None req-cb289113-67bc-4f65-ba5c-caff52187dd5 tempest-AttachInterfacesUnderV243Test-1049645439 tempest-AttachInterfacesUnderV243Test-1049645439-project-member] [instance: a2cee0f3-08b3-4a25-9d5e-9760604ff948] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1331.808652] env[68798]: DEBUG nova.network.neutron [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1331.817117] env[68798]: DEBUG nova.compute.manager [None req-cb289113-67bc-4f65-ba5c-caff52187dd5 tempest-AttachInterfacesUnderV243Test-1049645439 tempest-AttachInterfacesUnderV243Test-1049645439-project-member] [instance: a2cee0f3-08b3-4a25-9d5e-9760604ff948] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1331.842334] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09e01b7-c048-437d-b8c2-77129d4843c7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.845886] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cb289113-67bc-4f65-ba5c-caff52187dd5 tempest-AttachInterfacesUnderV243Test-1049645439 tempest-AttachInterfacesUnderV243Test-1049645439-project-member] Lock "a2cee0f3-08b3-4a25-9d5e-9760604ff948" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.165s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.852711] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e8d006-6b65-412e-8876-eeb3c67f7b15 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.857724] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1331.885740] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f641d76a-3a0b-4f99-9fae-a80b9c014f2f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.897694] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd235e58-f526-4466-94c4-a42202812584 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.916237] env[68798]: DEBUG nova.compute.provider_tree [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.918105] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.925488] env[68798]: DEBUG nova.scheduler.client.report [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1331.940780] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.571s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.941585] env[68798]: ERROR nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.driver.spawn(context, instance, image_meta, [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._fetch_image_if_missing(context, vi) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image_fetch(context, vi, tmp_image_ds_loc) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] images.fetch_image( [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] metadata = IMAGE_API.get(context, image_ref) [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1331.941585] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return session.show(context, image_id, [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] _reraise_translated_image_exception(image_id) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise new_exc.with_traceback(exc_trace) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: 
fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. [ 1331.942745] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1331.942745] env[68798]: DEBUG nova.compute.utils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
{{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1331.943591] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.026s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.945044] env[68798]: INFO nova.compute.claims [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1331.947749] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Build of instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a was re-scheduled: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1331.948260] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1331.948450] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1331.948621] env[68798]: DEBUG nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1331.948787] env[68798]: DEBUG nova.network.neutron [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1331.967632] env[68798]: DEBUG nova.network.neutron [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1331.976788] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Releasing lock "refresh_cache-9923a3c7-f090-4a01-8c57-36c8c22c6b14" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.977465] env[68798]: DEBUG nova.compute.manager [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1331.977563] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1331.978096] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea381948-ef06-4c1c-94ff-2db4f8ad3eff {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.994518] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc749d4-7c8e-476a-ab23-6bb960889f66 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.027448] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9923a3c7-f090-4a01-8c57-36c8c22c6b14 could not be found. 
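Annotation: the traceback above shows the glanceclient HTTP 401 being translated into nova.exception.ImageNotAuthorized inside the show() call, with the original traceback re-attached ("raise new_exc.with_traceback(exc_trace)"), which is why the log prints the chained "During handling of the above exception" blocks. The snippet below is a minimal, self-contained sketch of that translation pattern only; the exception classes and the fake client are illustrative stand-ins, not the real nova.image.glance or glanceclient code.

# Minimal sketch of the 401 -> ImageNotAuthorized translation pattern
# visible in the traceback above. All names are hypothetical stand-ins.
import sys


class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized (HTTP 401)."""


class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized."""

    def __init__(self, image_id):
        super().__init__("Not authorized for image %s." % image_id)


def _fake_glance_get(image_id):
    # Simulates the image-service call failing because the request token
    # was rejected (HTTP 401), as in the log.
    raise HTTPUnauthorized("HTTP 401 Unauthorized")


def show(image_id):
    try:
        return _fake_glance_get(image_id)
    except HTTPUnauthorized:
        # Translate the client-level error into a service-level error while
        # keeping the original traceback attached, mirroring
        # "raise new_exc.with_traceback(exc_trace)" in the log above.
        exc_trace = sys.exc_info()[2]
        raise ImageNotAuthorized(image_id).with_traceback(exc_trace)


if __name__ == "__main__":
    try:
        show("659c1b0c-65c8-46ab-93ff-5947bb8f4c70")
    except ImageNotAuthorized as exc:
        print("caught:", exc)

Because the re-raise happens inside the except block, Python chains the two exceptions implicitly, producing exactly the "During handling of the above exception, another exception occurred" layout seen in the log.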
[ 1332.027448] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1332.027448] env[68798]: INFO nova.compute.manager [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1332.027640] env[68798]: DEBUG oslo.service.loopingcall [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.030335] env[68798]: DEBUG nova.compute.manager [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1332.030463] env[68798]: DEBUG nova.network.neutron [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1332.057607] env[68798]: DEBUG nova.network.neutron [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1332.066596] env[68798]: DEBUG nova.network.neutron [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.078856] env[68798]: INFO nova.compute.manager [-] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] Took 0.05 seconds to deallocate network for instance. [ 1332.130810] env[68798]: DEBUG neutronclient.v2_0.client [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1332.131986] env[68798]: ERROR nova.compute.manager [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.driver.spawn(context, instance, image_meta, [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._fetch_image_if_missing(context, vi) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image_fetch(context, vi, tmp_image_ds_loc) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] images.fetch_image( [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] metadata = IMAGE_API.get(context, image_ref) [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1332.131986] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return session.show(context, image_id, [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] _reraise_translated_image_exception(image_id) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise new_exc.with_traceback(exc_trace) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: 
fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = getattr(controller, method)(*args, **kwargs) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._get(image_id) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] resp, body = self.http_client.get(url, headers=header) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.request(url, 'GET', **kwargs) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self._handle_response(resp) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exc.from_response(resp, resp.content) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._build_and_run_instance(context, instance, image, [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exception.RescheduledException( [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.RescheduledException: Build of instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a was re-scheduled: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] exception_handler_v20(status_code, error_body) [ 1332.133466] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise client_exc(message=error_message, [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Neutron server returns request_ids: ['req-15baac2f-f367-4619-91c2-b2a5b7ab59c5'] [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: 
fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._deallocate_network(context, instance, requested_networks) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.network_api.deallocate_for_instance( [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] data = neutron.list_ports(**search_opts) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.list('ports', self.ports_path, retrieve_all, [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] for r in self._pagination(collection, path, **params): [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] res = self.get(path, params=params) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: 
fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.retry_request("GET", action, body=body, [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.do_request(method, action, body=body, [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._handle_fault_response(status_code, replybody, resp) [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exception.Unauthorized() [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.Unauthorized: Not authorized. [ 1332.134894] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.186079] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ff2e111a-9807-469c-b34c-2f5119d14917 tempest-ServerMetadataNegativeTestJSON-582189155 tempest-ServerMetadataNegativeTestJSON-582189155-project-member] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.426s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.187143] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 156.678s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.187356] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 9923a3c7-f090-4a01-8c57-36c8c22c6b14] During sync_power_state the instance has a pending task (deleting). Skip. 
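Annotation: the deallocation traceback above ends in the nova.network.neutron wrapper converting neutronclient's Unauthorized (HTTP 401) into nova.exception.Unauthorized; the later records in this section show the same wrapper raising NeutronAdminCredentialConfigurationInvalid instead when the failing client was built from the service's admin credentials ("please verify Neutron admin credential located in nova.conf"). The sketch below illustrates that two-way mapping as a decorator; the decorator, class names, and the is_admin_client flag are illustrative assumptions, not the real wrapper in nova/network/neutron.py.

# Hedged sketch of a 401-translation wrapper around Neutron client calls.
# Names are hypothetical stand-ins for the Nova/neutronclient types in the log.
import functools


class NeutronUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""


class NotAuthorized(Exception):
    """Stand-in for nova.exception.Unauthorized ("Not authorized.")."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the admin-credential variant of the same failure."""


def translate_neutron_401(is_admin_client):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except NeutronUnauthorized:
                if is_admin_client:
                    # A 401 on the service's own credentials points at a
                    # configuration problem, not at the end user's token.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise NotAuthorized()
        return wrapper
    return decorator


@translate_neutron_401(is_admin_client=False)
def list_ports(**search_opts):
    # Simulates neutron.list_ports() being rejected with HTTP 401.
    raise NeutronUnauthorized("401 Unauthorized")


if __name__ == "__main__":
    try:
        list_ports(device_id="fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a")
    except NotAuthorized:
        print("user token rejected -> Not authorized.")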
[ 1332.187572] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "9923a3c7-f090-4a01-8c57-36c8c22c6b14" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.193607] env[68798]: INFO nova.scheduler.client.report [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Deleted allocations for instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a [ 1332.214048] env[68798]: DEBUG oslo_concurrency.lockutils [None req-515d0aa0-7a88-4357-b1cc-cbfea75a350c tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 627.995s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.217952] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 430.232s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.218214] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Acquiring lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.218958] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.219163] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.221515] env[68798]: INFO nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Terminating instance [ 1332.224127] env[68798]: DEBUG nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 
tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1332.224337] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1332.224617] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47620fd3-f9b8-4055-9fc8-b75d0c313378 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.233646] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7942f8f3-5b20-482c-bf2c-c7c402896b45 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.247128] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1332.270136] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a could not be found. [ 1332.270359] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1332.270629] env[68798]: INFO nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1332.270904] env[68798]: DEBUG oslo.service.loopingcall [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.273752] env[68798]: DEBUG nova.compute.manager [-] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1332.273823] env[68798]: DEBUG nova.network.neutron [-] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1332.296619] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1332.364263] env[68798]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1332.364519] env[68798]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1332.365049] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-5cd1af2e-465c-4dea-93b2-4e2ab831234d'] [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1332.365049] env[68798]: ERROR oslo.service.loopingcall [ 1332.366314] env[68798]: ERROR nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
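Annotation: the two tracebacks above show the cleanup control flow around network deallocation: list_ports() fails with a 401 on the admin client, the retrying looping call gives up and lets NeutronAdminCredentialConfigurationInvalid escape, and the compute manager then records the failure and (as the next records show) sets the instance's vm_state to ERROR rather than completing the delete. The sketch below reproduces only that control-flow shape with plain-stdlib stand-ins; the retry counts, function names, and print calls are illustrative assumptions, not the oslo.service looping-call machinery itself.

# Hedged sketch of "deallocate with retries, then mark the instance ERROR".
# All names are hypothetical stand-ins for the Nova internals named in the log.
import time


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""


def deallocate_network(instance_uuid):
    # In the log this is the neutron list_ports() call failing with a 401 on
    # the admin client; here the failure is simulated unconditionally.
    raise NeutronAdminCredentialConfigurationInvalid(
        "Networking client is experiencing an unauthorized exception.")


def deallocate_network_with_retries(instance_uuid, attempts=3, delay=0.1):
    for attempt in range(1, attempts + 1):
        try:
            return deallocate_network(instance_uuid)
        except NeutronAdminCredentialConfigurationInvalid:
            # Treat the credential problem as non-retriable, so it escapes
            # the retry loop just as it escapes the looping call in the log.
            raise
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)


def shutdown_instance(instance_uuid):
    try:
        deallocate_network_with_retries(instance_uuid)
    except Exception as exc:
        # Mirrors "Setting instance vm_state to ERROR" in the records that
        # follow: the failure is surfaced on the instance, then re-raised.
        print("Setting instance vm_state to ERROR:", exc)
        raise


if __name__ == "__main__":
    try:
        shutdown_instance("fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a")
    except NeutronAdminCredentialConfigurationInvalid:
        pass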
[ 1332.397082] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62150d2-384b-4774-b2f6-3d16f64081a0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.407894] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b64764-a882-4d4f-9d96-1ddb80f1448a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.414817] env[68798]: ERROR nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] exception_handler_v20(status_code, error_body) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise client_exc(message=error_message, [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Neutron server returns request_ids: ['req-5cd1af2e-465c-4dea-93b2-4e2ab831234d'] [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During handling of the above exception, another exception occurred: [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Traceback (most recent call last): [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._delete_instance(context, instance, bdms) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._shutdown_instance(context, instance, bdms) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._try_deallocate_network(context, instance, requested_networks) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] with excutils.save_and_reraise_exception(): [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.force_reraise() [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise self.value [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] _deallocate_network_with_retries() [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return evt.wait() [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = hub.switch() [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.greenlet.switch() [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] result = func(*self.args, **self.kw) [ 1332.414817] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] 
result = f(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._deallocate_network( [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self.network_api.deallocate_for_instance( [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] data = neutron.list_ports(**search_opts) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.list('ports', self.ports_path, retrieve_all, [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] for r in self._pagination(collection, path, **params): [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] res = self.get(path, params=params) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.retry_request("GET", action, body=body, [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] return self.do_request(method, action, body=body, [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] ret = obj(*args, **kwargs) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] self._handle_fault_response(status_code, replybody, resp) [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1332.416622] env[68798]: ERROR nova.compute.manager [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] [ 1332.453825] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.236s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.456038] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4203094d-5cb4-4e2b-a727-09c09f1dc1b1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.459262] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 156.950s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.459546] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1332.459781] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.466463] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795a728e-d10f-4a70-b64c-95303a9aa62d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.484810] env[68798]: DEBUG nova.compute.provider_tree [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.496512] env[68798]: DEBUG nova.scheduler.client.report [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.512250] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.569s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.512743] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1332.515039] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.220s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.516420] env[68798]: INFO nova.compute.claims [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1332.550067] env[68798]: DEBUG nova.compute.utils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1332.551534] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1332.551717] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1332.555197] env[68798]: INFO nova.compute.manager [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] [instance: fdb3f766-6f31-45e0-9d09-f3b2b61cdb8a] Successfully reverted task state from None on failure for instance. [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server [None req-cc66348a-3a39-4350-b11c-c7ae783a30a3 tempest-DeleteServersAdminTestJSON-1445962786 tempest-DeleteServersAdminTestJSON-1445962786-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-5cd1af2e-465c-4dea-93b2-4e2ab831234d'] [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1332.559494] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.560943] env[68798]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1332.560943] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1332.563194] env[68798]: ERROR oslo_messaging.rpc.server [ 1332.563194] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1332.640367] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1332.669657] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.669973] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.670250] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.670528] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.670720] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.670902] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.671157] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.671372] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 
tempest-ImagesTestJSON-406590470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.671629] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.671814] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.671989] env[68798]: DEBUG nova.virt.hardware [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.672887] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf3ccf6-c187-4728-8318-258605dfb643 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.687868] env[68798]: DEBUG nova.policy [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4591576f20d142a0a68342f8a1c9bfc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5b33dbd010340649a5c38226ec87f36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1332.690652] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029a5d22-4821-4a5c-a446-62ed509da6a4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.934495] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ebde64-f70b-407c-a87f-4d48adadc71f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.943667] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ab9ef4-ab82-42d3-a389-9c7225ba1355 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.975708] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30b6753-fc2d-4b55-9cff-1edba6c6cdd3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.984225] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddd6601-d280-4645-a28c-d358aafd9a5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.998678] env[68798]: DEBUG 
nova.compute.provider_tree [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.008793] env[68798]: DEBUG nova.scheduler.client.report [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1333.030538] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.031087] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1333.072024] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Successfully created port: 5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1333.075926] env[68798]: DEBUG nova.compute.utils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1333.077204] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1333.077380] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1333.088792] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1333.163404] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1333.176093] env[68798]: DEBUG nova.policy [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e7ee34608848b39cc2a7114e7d682d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efeea8a59294c7ca8b499dda555a3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1333.192738] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1333.192965] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1333.193140] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1333.193324] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1333.193555] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1333.193607] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 
tempest-ServersTestJSON-1349294209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1333.193875] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1333.193968] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1333.194210] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1333.194358] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1333.194536] env[68798]: DEBUG nova.virt.hardware [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1333.195599] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcb5979-ae15-4e45-a5ee-966737776434 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.204937] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900aaeda-25e5-4df4-b4a0-8459f67153bf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.047668] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Successfully created port: 35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1334.369601] env[68798]: DEBUG nova.compute.manager [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Received event network-vif-plugged-5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1334.369828] env[68798]: DEBUG oslo_concurrency.lockutils [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] Acquiring lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.370767] env[68798]: DEBUG oslo_concurrency.lockutils [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.371036] env[68798]: DEBUG oslo_concurrency.lockutils [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.371289] env[68798]: DEBUG nova.compute.manager [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] No waiting events found dispatching network-vif-plugged-5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1334.371524] env[68798]: WARNING nova.compute.manager [req-3c13f9cb-b432-4e85-8ec7-09374973ae12 req-3c3dd860-9ee0-420a-803e-ef057e4c8217 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Received unexpected event network-vif-plugged-5df401e1-e051-4732-aeee-6e4e3649658d for instance with vm_state building and task_state spawning. [ 1334.564828] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Successfully updated port: 5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1334.576175] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.576662] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.577971] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1334.641274] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1335.045809] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Updating instance_info_cache with network_info: [{"id": "5df401e1-e051-4732-aeee-6e4e3649658d", "address": "fa:16:3e:39:9a:1c", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5df401e1-e0", "ovs_interfaceid": "5df401e1-e051-4732-aeee-6e4e3649658d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.062245] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.062613] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance network_info: |[{"id": "5df401e1-e051-4732-aeee-6e4e3649658d", "address": "fa:16:3e:39:9a:1c", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5df401e1-e0", "ovs_interfaceid": "5df401e1-e051-4732-aeee-6e4e3649658d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1335.063063] env[68798]: DEBUG 
nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:9a:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5df401e1-e051-4732-aeee-6e4e3649658d', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.071074] env[68798]: DEBUG oslo.service.loopingcall [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.071621] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1335.071851] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5556db12-730b-4e6a-a5c6-df8ec97057dc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.094313] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.094313] env[68798]: value = "task-4217633" [ 1335.094313] env[68798]: _type = "Task" [ 1335.094313] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.105206] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217633, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.243021] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Successfully updated port: 35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1335.259593] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.259593] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.259593] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1335.321109] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1335.605600] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217633, 'name': CreateVM_Task, 'duration_secs': 0.362371} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.605600] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1335.606154] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.606325] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.606656] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1335.606919] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4fada4-c1e0-4697-ae6f-6bef84ff5600 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.612171] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1335.612171] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]525ca085-49cc-904e-7f66-f1bde7a7d994" [ 1335.612171] env[68798]: _type = "Task" [ 1335.612171] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.620576] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]525ca085-49cc-904e-7f66-f1bde7a7d994, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.808255] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Updating instance_info_cache with network_info: [{"id": "35800a0f-8738-4472-97e8-f84ba0a3b746", "address": "fa:16:3e:c0:0d:09", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35800a0f-87", "ovs_interfaceid": "35800a0f-8738-4472-97e8-f84ba0a3b746", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.823491] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.823776] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance network_info: |[{"id": "35800a0f-8738-4472-97e8-f84ba0a3b746", "address": "fa:16:3e:c0:0d:09", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35800a0f-87", "ovs_interfaceid": "35800a0f-8738-4472-97e8-f84ba0a3b746", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
1335.824238] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:0d:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c6324fd-a761-417c-bc85-b6278daecfc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35800a0f-8738-4472-97e8-f84ba0a3b746', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.831468] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating folder: Project (8efeea8a59294c7ca8b499dda555a3d6). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1335.832149] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4df3a00-ed4a-43ea-8ea5-12382e3e96dc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.844146] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created folder: Project (8efeea8a59294c7ca8b499dda555a3d6) in parent group-v834492. [ 1335.844422] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating folder: Instances. Parent ref: group-v834566. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1335.844710] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-627dea09-c40c-46fc-9164-4a2602f1f6eb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.855380] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created folder: Instances in parent group-v834566. [ 1335.855639] env[68798]: DEBUG oslo.service.loopingcall [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.855833] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1335.856050] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d710b71-d403-4ee1-82a5-8bb7c7455dba {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.876086] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.876086] env[68798]: value = "task-4217636" [ 1335.876086] env[68798]: _type = "Task" [ 1335.876086] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.884543] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217636, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.123330] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.123666] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.123906] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.387277] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217636, 'name': CreateVM_Task, 'duration_secs': 0.371} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.387486] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1336.388247] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.388439] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.388772] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1336.389048] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4198dbd-05c9-4941-901a-e94f5f4e6d10 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.394496] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e 
tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 1336.394496] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5269f7d2-a505-06b8-e0de-1ff2151c18d1" [ 1336.394496] env[68798]: _type = "Task" [ 1336.394496] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.404236] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5269f7d2-a505-06b8-e0de-1ff2151c18d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.405779] env[68798]: DEBUG nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Received event network-changed-5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1336.405962] env[68798]: DEBUG nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Refreshing instance network info cache due to event network-changed-5df401e1-e051-4732-aeee-6e4e3649658d. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1336.406224] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Acquiring lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.406382] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Acquired lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.406555] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Refreshing network info cache for port 5df401e1-e051-4732-aeee-6e4e3649658d {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1336.678137] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Updated VIF entry in instance network info cache for port 5df401e1-e051-4732-aeee-6e4e3649658d. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1336.678625] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Updating instance_info_cache with network_info: [{"id": "5df401e1-e051-4732-aeee-6e4e3649658d", "address": "fa:16:3e:39:9a:1c", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5df401e1-e0", "ovs_interfaceid": "5df401e1-e051-4732-aeee-6e4e3649658d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.688681] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Releasing lock "refresh_cache-e75b2848-5dfa-4ffa-b37a-6338c8221dd3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.688681] env[68798]: DEBUG nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Received event network-vif-plugged-35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1336.688870] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Acquiring lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.689116] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.689295] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1336.689466] env[68798]: DEBUG 
nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] No waiting events found dispatching network-vif-plugged-35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1336.689637] env[68798]: WARNING nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Received unexpected event network-vif-plugged-35800a0f-8738-4472-97e8-f84ba0a3b746 for instance with vm_state building and task_state spawning. [ 1336.689873] env[68798]: DEBUG nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Received event network-changed-35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1336.689976] env[68798]: DEBUG nova.compute.manager [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Refreshing instance network info cache due to event network-changed-35800a0f-8738-4472-97e8-f84ba0a3b746. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1336.690148] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Acquiring lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.690281] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Acquired lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.690485] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Refreshing network info cache for port 35800a0f-8738-4472-97e8-f84ba0a3b746 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1336.905628] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.906016] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.906245] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" 
{{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.045597] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Updated VIF entry in instance network info cache for port 35800a0f-8738-4472-97e8-f84ba0a3b746. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1337.046641] env[68798]: DEBUG nova.network.neutron [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Updating instance_info_cache with network_info: [{"id": "35800a0f-8738-4472-97e8-f84ba0a3b746", "address": "fa:16:3e:c0:0d:09", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35800a0f-87", "ovs_interfaceid": "35800a0f-8738-4472-97e8-f84ba0a3b746", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.056509] env[68798]: DEBUG oslo_concurrency.lockutils [req-275ba050-59cd-4643-8949-6cde2a10da32 req-4f63cc6c-5351-48f6-bbe1-02d3875529d4 service nova] Releasing lock "refresh_cache-cbe4e626-f063-4877-985f-b3e36c161c9e" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.946456] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.876430] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1344.048285] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.048639] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache 
{{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1345.048912] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1345.048982] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1345.071984] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072188] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072309] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072438] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072564] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072689] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072810] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.072931] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.073062] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.073185] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1345.073337] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1345.073897] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.048127] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.048387] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.048599] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1346.048753] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1346.908287] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "b430775d-fcfb-4233-bc78-87d279e82fb5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.908472] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.049049] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.048504] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.060726] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.060866] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.061018] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.061190] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1351.062351] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3553dbd8-d078-4ee8-873a-846a82cea8dd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.071567] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220c9497-4cc3-40fc-8bc7-0afb2c139df9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.087310] 
env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5342f8ec-f529-43f9-a97b-5a5b40942e93 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.094354] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a8fe87-db2b-478a-867d-312d8bed99b1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.124236] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180737MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1351.124395] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.124597] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.201675] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e848c3f4-64ff-4956-88e0-afa27be73068 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.201838] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.201971] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202110] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202238] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202360] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202476] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202592] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202707] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.202824] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1351.213824] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.224855] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.235128] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf9a284-56a7-4780-b7a1-fedf77f8231a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.245965] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 802f2573-8a44-489d-a0a0-32ca69dc6281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.257196] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 19702e1f-2d11-492c-9e9e-067d1aa2b6a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.268647] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e42bca43-6e9e-49d5-8cbd-4c57e5f0123b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.279808] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.291947] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17199829-f72a-4ba6-93b4-da057f00bbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.303238] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dcced840-b57b-47bd-8d7b-bfe971290659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.314185] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.326714] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5de688-91c2-4197-a396-c0df71fdbeda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.338434] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c142b101-e8b0-4073-9079-5c9730eac176 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.349172] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1351.349419] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1351.349568] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1351.636877] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e48ff1-ca89-4440-9129-4a1b6b34cad1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.645024] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaaee5c-8338-4ba2-a93d-7d71d96a48ae {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.675200] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914dbc3c-97bb-4f08-b2e2-392086016936 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.683426] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bd26bc-0d37-4552-a398-9d157df21e5b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.697784] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 
855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.706113] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1351.726641] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1351.726853] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.602s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.426149] env[68798]: WARNING oslo_vmware.rw_handles [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.426149] env[68798]: ERROR oslo_vmware.rw_handles [ 1379.426854] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1379.428978] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None 
req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1379.429296] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Copying Virtual Disk [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/9a0c4652-d626-4553-8c28-459af2cbce26/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1379.429604] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db704f5a-eefd-46c5-a45a-95c80a7f9ecf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.439328] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1379.439328] env[68798]: value = "task-4217637" [ 1379.439328] env[68798]: _type = "Task" [ 1379.439328] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.447899] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217637, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.950723] env[68798]: DEBUG oslo_vmware.exceptions [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1379.951055] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.951627] env[68798]: ERROR nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1379.951627] env[68798]: Faults: ['InvalidArgument'] [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Traceback (most recent call last): [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] yield resources [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self.driver.spawn(context, instance, image_meta, [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self._fetch_image_if_missing(context, vi) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] image_cache(vi, tmp_image_ds_loc) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] vm_util.copy_virtual_disk( [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] session._wait_for_task(vmdk_copy_task) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return self.wait_for_task(task_ref) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return evt.wait() [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] result = hub.switch() [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return self.greenlet.switch() [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self.f(*self.args, **self.kw) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] raise exceptions.translate_fault(task_info.error) [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Faults: ['InvalidArgument'] [ 1379.951627] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] [ 1379.952711] env[68798]: INFO nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Terminating instance [ 1379.953550] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.953767] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.954030] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5348ffa3-0430-47ce-bcaa-4b54911b9952 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
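
The failed CopyVirtualDisk_Task above surfaces through oslo.vmware's task-polling layer: wait_for_task raises VimFaultException, and since no dedicated exception class is registered for 'InvalidArgument' ("Fault InvalidArgument not matched"), the generic fault propagates into Nova's spawn path and the instance is torn down. A minimal caller-side sketch of that pattern, assuming `session` is an oslo_vmware.api.VMwareAPISession and `copy_task` is a task reference such as task-4217637; the helper name is hypothetical, not Nova code:

    from oslo_vmware import exceptions as vexc

    def wait_and_report(session, copy_task):
        try:
            # Polls the vCenter task until it reaches 'success' or 'error',
            # which is what produces the "Task: {...} progress is 0%" lines.
            return session.wait_for_task(copy_task)
        except vexc.VimFaultException as exc:
            # fault_list carries the raw vCenter fault names, e.g.
            # ['InvalidArgument'] for "A specified parameter was not correct:
            # fileType" seen in the traceback above.
            print('vCenter task failed: %s (faults: %s)' % (exc, exc.fault_list))
            raise
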
[ 1379.956278] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1379.956604] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1379.957216] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee9b0df-5fb9-4e70-af87-d18630ead602 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.964847] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1379.965102] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79097852-1bd0-4ad0-b796-61ba964dc8b9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.967504] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.967684] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1379.968645] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62543a88-b969-42f1-a369-e58e0113599c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.973665] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for the task: (returnval){ [ 1379.973665] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52d87d0d-f53a-0142-1f88-891ad03c67a4" [ 1379.973665] env[68798]: _type = "Task" [ 1379.973665] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.982011] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52d87d0d-f53a-0142-1f88-891ad03c67a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.043801] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1380.044077] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1380.044218] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleting the datastore file [datastore1] e848c3f4-64ff-4956-88e0-afa27be73068 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.044492] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dba4c433-87c3-44c0-bf69-9b81480ac1b1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.050907] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1380.050907] env[68798]: value = "task-4217639" [ 1380.050907] env[68798]: _type = "Task" [ 1380.050907] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.059427] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.484616] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1380.484905] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Creating directory with path [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.485555] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fb1d79c-397e-429e-a783-8c73425b6e69 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.496994] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Created directory with path [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.497221] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Fetch image to [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1380.497393] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1380.498153] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34599fa1-bdd2-45c5-8adb-f84e7211e35a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.505440] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14d38eb-b932-4007-8843-b0b6ed553a7c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.515058] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0cd984-a729-40b0-a2e3-b367f2bc75db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.546107] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f65743-fa58-4001-b7cf-ce4928838ade {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.555417] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-96cda2a9-7b9b-4ef3-87ac-060d5e460196 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.563967] env[68798]: DEBUG oslo_vmware.api [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068272} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.563967] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.563967] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1380.563967] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1380.564226] env[68798]: INFO nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Took 0.61 seconds to destroy the instance on the hypervisor. 
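The interleaved entries for instance e848c3f4 trace the hypervisor-side teardown in order: VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory, then a wait on task-4217639 before "Instance destroyed" is logged. A compressed sketch of that ordering, with the vSphere calls abstracted behind caller-supplied callables (hypothetical stand-ins, not the real vmops methods):

def destroy_on_hypervisor(unregister_vm, delete_datastore_dir, wait_for_task,
                          vm_ref, instance_dir):
    # Sketch of the teardown order in the log above, not Nova's implementation.
    unregister_vm(vm_ref)                      # VirtualMachine.UnregisterVM
    task = delete_datastore_dir(instance_dir)  # FileManager.DeleteDatastoreFile_Task
    wait_for_task(task)                        # block until vCenter reports success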
[ 1380.566260] env[68798]: DEBUG nova.compute.claims [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1380.566475] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.566724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.584710] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1380.641825] env[68798]: DEBUG oslo_vmware.rw_handles [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1380.700865] env[68798]: DEBUG oslo_vmware.rw_handles [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1380.701079] env[68798]: DEBUG oslo_vmware.rw_handles [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1380.936847] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff59787-78f5-4b97-811c-21d5eb357cbc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.945415] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0b3ad8-6a73-4902-9f96-a40e84b376ec {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.975104] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629d2432-b1e9-4466-94df-6cf6bc9d91d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.982878] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c594ba-a0ec-4bec-a3b4-1c2fe6691fc4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.996297] env[68798]: DEBUG nova.compute.provider_tree [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.004941] env[68798]: DEBUG nova.scheduler.client.report [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1381.018354] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.452s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.018877] env[68798]: ERROR nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.018877] env[68798]: Faults: ['InvalidArgument'] [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Traceback (most recent call last): [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] 
self.driver.spawn(context, instance, image_meta, [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self._fetch_image_if_missing(context, vi) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] image_cache(vi, tmp_image_ds_loc) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] vm_util.copy_virtual_disk( [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] session._wait_for_task(vmdk_copy_task) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return self.wait_for_task(task_ref) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return evt.wait() [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] result = hub.switch() [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] return self.greenlet.switch() [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] self.f(*self.args, **self.kw) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] raise exceptions.translate_fault(task_info.error) [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Faults: ['InvalidArgument'] [ 1381.018877] env[68798]: ERROR nova.compute.manager [instance: e848c3f4-64ff-4956-88e0-afa27be73068] [ 1381.019711] env[68798]: DEBUG nova.compute.utils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1381.021065] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Build of instance e848c3f4-64ff-4956-88e0-afa27be73068 was re-scheduled: A specified parameter was not correct: fileType [ 1381.021065] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1381.021458] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1381.021629] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1381.021861] env[68798]: DEBUG nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1381.021982] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1381.388482] env[68798]: DEBUG nova.network.neutron [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.418967] env[68798]: INFO nova.compute.manager [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Took 0.40 seconds to deallocate network for instance. 
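The traceback spells out the failing call chain (spawn -> _fetch_image_if_missing -> copy_virtual_disk -> wait_for_task -> _poll_task), where the task's error is translated into the "fileType" InvalidArgument fault and the build is re-scheduled. A simplified, synchronous stand-in for that polling loop, assuming a get_task_info callable; the real oslo.vmware code drives this from a looping call on eventlet instead:

import time


class TaskFault(Exception):
    """Stand-in for the translated vCenter fault raised by _poll_task."""


def wait_for_task(get_task_info, poll_interval=0.5):
    # Sketch only: get_task_info is a hypothetical callable returning an
    # object with 'state' ('running'/'success'/'error') and 'error' fields.
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            # As in the log: the task error becomes an exception
            # ("A specified parameter was not correct: fileType"), which
            # aborts the build and leads to the re-schedule recorded above.
            raise TaskFault(info.error)
        time.sleep(poll_interval)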
[ 1381.531524] env[68798]: INFO nova.scheduler.client.report [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted allocations for instance e848c3f4-64ff-4956-88e0-afa27be73068 [ 1381.554255] env[68798]: DEBUG oslo_concurrency.lockutils [None req-03c968f2-8312-4e49-938a-b8bc6661e04a tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.125s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.555669] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.894s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.556354] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.556354] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.556354] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.558734] env[68798]: INFO nova.compute.manager [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Terminating instance [ 1381.560556] env[68798]: DEBUG nova.compute.manager [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1381.560748] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1381.561330] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3c4445f-6f78-430b-826c-c939505c2417 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.570539] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b6e44-5daa-41e8-bb4a-fd1be51bef6b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.588473] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1381.600287] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e848c3f4-64ff-4956-88e0-afa27be73068 could not be found. [ 1381.600512] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1381.600692] env[68798]: INFO nova.compute.manager [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1381.600947] env[68798]: DEBUG oslo.service.loopingcall [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.601245] env[68798]: DEBUG nova.compute.manager [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1381.601350] env[68798]: DEBUG nova.network.neutron [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1381.631273] env[68798]: DEBUG nova.network.neutron [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.639843] env[68798]: INFO nova.compute.manager [-] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] Took 0.04 seconds to deallocate network for instance. [ 1381.641983] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.642239] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.643675] env[68798]: INFO nova.compute.claims [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1381.728891] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d05e571b-859c-4ad0-868e-0b6bbd3eaea7 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.173s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.729799] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 206.220s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.729987] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e848c3f4-64ff-4956-88e0-afa27be73068] During sync_power_state the instance has a pending task (deleting). Skip. 
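Both the abort path for e848c3f4 and the new claim for 17cce398 serialize on the same "compute_resources" lock before the resource tracker is touched, with the waited/held times logged each time. A minimal sketch of that locking discipline with oslo.concurrency, using a hypothetical tracker class rather than Nova's ResourceTracker:

from oslo_concurrency import lockutils


class TrackerSketch(object):
    # Illustrative only: shows the 'compute_resources' lock pattern the log
    # entries follow, not Nova's actual resource tracker.

    def __init__(self):
        self.claims = {}

    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid, resources):
        # Held while usage is recorded, mirroring "... acquired by
        # ResourceTracker.instance_claim" in the log.
        self.claims[instance_uuid] = resources
        return resources

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(self, instance_uuid):
        # Same lock on the failure path, so a claim and its abort can
        # never interleave with another claim.
        self.claims.pop(instance_uuid, None)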
[ 1381.730176] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "e848c3f4-64ff-4956-88e0-afa27be73068" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.966320] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17180cd-9615-4155-a6cd-9ec50f8bd55f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.974338] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011e252a-f466-4b40-918e-d29d9f473254 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.006875] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66d1c23-2d07-4c9e-9275-f39067bd7372 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.015041] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4368044e-bcff-466f-87e5-6c2da74621c1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.028793] env[68798]: DEBUG nova.compute.provider_tree [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.039441] env[68798]: DEBUG nova.scheduler.client.report [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.053278] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.411s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1382.053946] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1382.088163] env[68798]: DEBUG nova.compute.utils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1382.090474] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Not allocating networking since 'none' was specified. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1382.100171] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1382.161726] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1382.189312] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1382.189587] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1382.189747] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.189930] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1382.190096] env[68798]: DEBUG nova.virt.hardware [None 
req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.190254] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1382.190558] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1382.190736] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1382.190912] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1382.191117] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1382.191308] env[68798]: DEBUG nova.virt.hardware [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1382.192226] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd990cc8-dcd4-4080-b19c-693e45c70e39 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.200554] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9818d6a8-5c68-474d-a69e-5c0e6af91d3e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.215662] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance VIF info [] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.222585] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating folder: Project (cb0fe647d94a41ceb8dae1f0f2d02232). Parent ref: group-v834492. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1382.222908] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cc8e904-b83e-418e-82c2-4d52f1a0f433 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.233712] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created folder: Project (cb0fe647d94a41ceb8dae1f0f2d02232) in parent group-v834492. [ 1382.233839] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating folder: Instances. Parent ref: group-v834569. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1382.234104] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60100aad-60fd-41e5-83d3-09ca1f06b4e8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.243763] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created folder: Instances in parent group-v834569. [ 1382.244017] env[68798]: DEBUG oslo.service.loopingcall [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.244227] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1382.244437] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ae35d7db-535c-4f69-a86f-f4c8cc87ca4d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.261440] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.261440] env[68798]: value = "task-4217642" [ 1382.261440] env[68798]: _type = "Task" [ 1382.261440] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.269129] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217642, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.772373] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217642, 'name': CreateVM_Task, 'duration_secs': 0.280613} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.772693] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1382.772982] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.773159] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.773484] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1382.773743] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e030c27-ed0d-4f60-9217-be11584345e2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.778620] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1382.778620] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5241759a-30fa-0408-85d9-da3f18f975c9" [ 1382.778620] env[68798]: _type = "Task" [ 1382.778620] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.792064] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5241759a-30fa-0408-85d9-da3f18f975c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.289095] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.289403] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1383.289638] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.234899] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.235810] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.722632] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.048774] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1405.048984] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1405.049135] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1405.074069] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Skipping network cache 
update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074253] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074364] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074494] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074620] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074746] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074868] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.074993] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.075176] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.075309] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1405.075436] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1405.075997] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1406.048582] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.048577] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.048889] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.049138] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.049329] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1407.049452] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1410.044613] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.048772] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.062752] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.062988] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.063186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.063354] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1413.064947] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8262bb03-d8da-4bcb-89bb-7547b4030b7c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.073786] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4121c2-6952-4f94-bb06-c469bc758600 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.088297] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeb542d-c112-4674-8f67-d5b0c5f1dc22 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.095305] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e184844-004a-4aac-acbc-725a082a641c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.125732] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180733MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1413.125920] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.126084] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.201939] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202120] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202254] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202400] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202588] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202755] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.202887] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.203014] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.203139] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.203254] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1413.214214] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.225124] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6cf9a284-56a7-4780-b7a1-fedf77f8231a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.236509] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 802f2573-8a44-489d-a0a0-32ca69dc6281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.246440] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 19702e1f-2d11-492c-9e9e-067d1aa2b6a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.257355] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e42bca43-6e9e-49d5-8cbd-4c57e5f0123b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.269843] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.281442] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17199829-f72a-4ba6-93b4-da057f00bbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.294240] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dcced840-b57b-47bd-8d7b-bfe971290659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.305403] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.319457] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5de688-91c2-4197-a396-c0df71fdbeda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.330235] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c142b101-e8b0-4073-9079-5c9730eac176 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.341679] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.354627] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1413.354627] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1413.354627] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1413.668024] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f959768-4e5d-4bc0-8855-1cfbd5058f51 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.676659] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d51e3b-b3d7-4e11-a13d-9f40e050e226 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.710196] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22db85d-70dc-4904-8a0e-ba2d0207f257 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.718077] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd96f5ed-2190-4281-8516-6a31198dfa1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.731715] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.740696] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.754327] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1413.754590] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.628s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.973857] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "cbe4e626-f063-4877-985f-b3e36c161c9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.443500] env[68798]: WARNING oslo_vmware.rw_handles [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1429.443500] env[68798]: ERROR oslo_vmware.rw_handles [ 1429.444075] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1429.445915] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec 
tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1429.446177] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Copying Virtual Disk [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/771e24b8-e112-4f2c-a63c-9425398852b5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1429.446459] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d14db74e-ce32-4fb6-a093-c6fc7b509911 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.456159] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for the task: (returnval){ [ 1429.456159] env[68798]: value = "task-4217643" [ 1429.456159] env[68798]: _type = "Task" [ 1429.456159] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.464358] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Task: {'id': task-4217643, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.968024] env[68798]: DEBUG oslo_vmware.exceptions [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1429.968024] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.968442] env[68798]: ERROR nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1429.968442] env[68798]: Faults: ['InvalidArgument'] [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Traceback (most recent call last): [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] yield resources [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self.driver.spawn(context, instance, image_meta, [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self._fetch_image_if_missing(context, vi) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] image_cache(vi, tmp_image_ds_loc) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] vm_util.copy_virtual_disk( [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] session._wait_for_task(vmdk_copy_task) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return self.wait_for_task(task_ref) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return evt.wait() [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] result = hub.switch() [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return self.greenlet.switch() [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self.f(*self.args, **self.kw) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] raise exceptions.translate_fault(task_info.error) [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Faults: ['InvalidArgument'] [ 1429.968442] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] [ 1429.969319] env[68798]: INFO nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Terminating instance [ 1429.970390] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.970598] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1429.970841] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e53c249-6ad6-47e2-9b05-a9ca10286530 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.973123] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1429.973317] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1429.974050] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4770862-798e-4e70-8314-b01e3fb9310f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.981543] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1429.981835] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab16a96f-15d2-43ef-8f18-f8c58b40ed7b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.984138] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1429.984314] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1429.985280] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eb8d2a5-3dd0-4a73-bc6a-8771459891c2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.990238] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for the task: (returnval){ [ 1429.990238] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a6cf77-4b65-6215-6038-eadc8b1d062c" [ 1429.990238] env[68798]: _type = "Task" [ 1429.990238] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.999963] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a6cf77-4b65-6215-6038-eadc8b1d062c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.064845] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1430.065079] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1430.065262] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Deleting the datastore file [datastore1] 30e8027d-98b3-4a5f-9eb4-244846cb90e2 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1430.065539] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e196310-49a1-4d36-9c54-75f6d9b6a267 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.072655] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for the task: (returnval){ [ 1430.072655] env[68798]: value = "task-4217645" [ 1430.072655] env[68798]: _type = "Task" [ 1430.072655] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.080888] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Task: {'id': task-4217645, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.500563] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1430.500854] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Creating directory with path [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1430.501106] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-30dc0efa-55e7-4007-8b0d-925fee42c9da {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.513882] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Created directory with path [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1430.514097] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Fetch image to [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1430.514288] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1430.515100] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b6f3f2-8f1d-42fe-a243-4e5acff56875 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.522709] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca960564-5e17-4c9d-a0c5-28259318670b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.532296] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de72187-abcf-4076-8889-7fde3e0eda6d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.563789] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8476ed-1429-485e-85e6-de2edcf563be {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.570491] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-69f9a5c0-a24d-4203-b632-db819219c82c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.582632] env[68798]: DEBUG oslo_vmware.api [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Task: {'id': task-4217645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09184} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.582884] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1430.583078] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1430.583257] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1430.583432] env[68798]: INFO nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Took 0.61 seconds to destroy the instance on the hypervisor. 
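The records above show the driver issuing FileManager.DeleteDatastoreFile_Task and then polling it through oslo_vmware.api until the poller reports it completed successfully. The sketch below illustrates that task round-trip using oslo.vmware directly; it is not Nova's code, and the vCenter host, credentials and datastore path in it are placeholders.

# Illustrative sketch only: mirrors the DeleteDatastoreFile_Task / wait_for_task
# round-trip visible in the log. Connection details and the path are made up.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',                  # placeholder vCenter host
    'administrator@vsphere.local',      # placeholder user
    'secret',                           # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)

content = session.vim.service_content
# The *_Task SOAP call returns a Task managed-object reference immediately.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    content.fileManager,
    name='[datastore1] vmware_temp/example-dir',  # placeholder datastore path
    datacenter=None)                              # a real call passes the datacenter ref
# wait_for_task polls the task (the "progress is 0%" lines above) and raises a
# translated exception such as VimFaultException if the task ends in error.
session.wait_for_task(task)

The same polling loop is what turned the earlier CopyVirtualDisk_Task fault into the VimFaultException "A specified parameter was not correct: fileType" traceback logged at 1429.968442.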
[ 1430.585687] env[68798]: DEBUG nova.compute.claims [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1430.585935] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.586206] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.592688] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1430.649482] env[68798]: DEBUG oslo_vmware.rw_handles [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1430.710942] env[68798]: DEBUG oslo_vmware.rw_handles [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1430.711196] env[68798]: DEBUG oslo_vmware.rw_handles [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1430.974759] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bff516-89e5-4970-95b0-8ea26af496a2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.982800] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a4b39-fc8f-49ab-8105-60529be8351e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.013047] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18079120-6567-4fe5-a33d-e73b3f3c71dc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.020274] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1992131-fdc3-4b79-a114-0f394c88ed26 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.033520] env[68798]: DEBUG nova.compute.provider_tree [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.041712] env[68798]: DEBUG nova.scheduler.client.report [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1431.058068] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.472s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.058628] env[68798]: ERROR nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.058628] env[68798]: Faults: ['InvalidArgument'] [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Traceback (most recent call last): [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1431.058628] env[68798]: ERROR 
nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self.driver.spawn(context, instance, image_meta, [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self._fetch_image_if_missing(context, vi) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] image_cache(vi, tmp_image_ds_loc) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] vm_util.copy_virtual_disk( [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] session._wait_for_task(vmdk_copy_task) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return self.wait_for_task(task_ref) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return evt.wait() [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] result = hub.switch() [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] return self.greenlet.switch() [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] self.f(*self.args, **self.kw) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] raise exceptions.translate_fault(task_info.error) [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Faults: ['InvalidArgument'] [ 1431.058628] env[68798]: ERROR nova.compute.manager [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] [ 1431.059499] env[68798]: DEBUG nova.compute.utils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1431.061012] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Build of instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 was re-scheduled: A specified parameter was not correct: fileType [ 1431.061012] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1431.061410] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1431.061588] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1431.061858] env[68798]: DEBUG nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1431.062045] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1431.759559] env[68798]: DEBUG nova.network.neutron [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.773977] env[68798]: INFO nova.compute.manager [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Took 0.71 seconds to deallocate network for instance. [ 1431.896318] env[68798]: INFO nova.scheduler.client.report [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Deleted allocations for instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 [ 1431.919286] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2bcd8903-b49d-4112-99b5-ec2233899aec tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.307s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.920755] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 424.966s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.920755] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Acquiring lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.921584] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.921584] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.923208] env[68798]: INFO nova.compute.manager [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Terminating instance [ 1431.924897] env[68798]: DEBUG nova.compute.manager [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1431.925104] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1431.925896] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f4bf0d4-70e1-414d-bc00-e877c5891baa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.935800] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58843e22-e694-47dc-9830-a1ff8544c3f7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.947698] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1431.971091] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30e8027d-98b3-4a5f-9eb4-244846cb90e2 could not be found. [ 1431.971831] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1431.971831] env[68798]: INFO nova.compute.manager [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1431.972452] env[68798]: DEBUG oslo.service.loopingcall [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.972452] env[68798]: DEBUG nova.compute.manager [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1431.972452] env[68798]: DEBUG nova.network.neutron [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1432.008505] env[68798]: DEBUG nova.network.neutron [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.013576] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.013823] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.015778] env[68798]: INFO nova.compute.claims [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1432.018708] env[68798]: INFO nova.compute.manager [-] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] Took 0.05 seconds to deallocate network for instance. [ 1432.102522] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c4ed49ac-5d2c-4183-bc99-1de6566741bd tempest-ServersAdminTestJSON-2127475638 tempest-ServersAdminTestJSON-2127475638-project-member] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.103377] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 256.593s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.103567] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 30e8027d-98b3-4a5f-9eb4-244846cb90e2] During sync_power_state the instance has a pending task (deleting). Skip. 
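The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return." line above comes from oslo.service's looping-call helpers, which Nova uses to retry network deallocation. A rough sketch of that helper follows; it assumes a FixedIntervalLoopingCall and a trivial body, since the exact variant and retry policy are not visible in the log.

from oslo_service import loopingcall

def _deallocate_network_with_retries():
    # Illustrative body: pretend deallocation succeeded on the first pass.
    # Raising LoopingCallDone stops the loop and hands a value back to wait().
    raise loopingcall.LoopingCallDone(retvalue=True)

call = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
result = call.start(interval=1).wait()  # blocks, like the "Waiting for function" line
print(result)                           # True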
[ 1432.103744] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "30e8027d-98b3-4a5f-9eb4-244846cb90e2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.343342] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a17ad3e-04db-4311-9b81-6585e700a633 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.351095] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fde1d5a-cb8d-4e71-a0f5-127d7d8ca811 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.381307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f181a94-2082-4d9d-b82e-574ab1508a95 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.388972] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602bad94-85f3-471a-960e-30d46ad59948 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.404267] env[68798]: DEBUG nova.compute.provider_tree [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.414498] env[68798]: DEBUG nova.scheduler.client.report [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.431149] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.417s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.431700] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1432.471156] env[68798]: DEBUG nova.compute.utils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1432.471800] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Not allocating networking since 'none' was specified. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1432.486352] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1432.555489] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1432.587620] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1432.587877] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1432.588044] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.588232] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1432.588378] env[68798]: DEBUG nova.virt.hardware [None 
req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.588525] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1432.588742] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1432.588903] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1432.589095] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1432.589269] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1432.589447] env[68798]: DEBUG nova.virt.hardware [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1432.590388] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee6ee00-5586-4dd1-92ed-1342018069bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.600210] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ec56a8-9565-4ca6-a359-37cb2f917cba {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.614728] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance VIF info [] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1432.620425] env[68798]: DEBUG oslo.service.loopingcall [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.620683] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1432.620900] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac43067c-a9db-4a4d-8276-91582516694e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.639424] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1432.639424] env[68798]: value = "task-4217646" [ 1432.639424] env[68798]: _type = "Task" [ 1432.639424] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.648067] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217646, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.150144] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217646, 'name': CreateVM_Task, 'duration_secs': 0.276608} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.150522] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1433.150741] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.150906] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.151251] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1433.151505] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09ee09a2-cbd5-43b6-ba64-cc61d9c4a04e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.156645] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1433.156645] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5259c973-cd73-ef2c-6097-4f234e65d52c" [ 1433.156645] env[68798]: _type = "Task" [ 1433.156645] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.165210] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5259c973-cd73-ef2c-6097-4f234e65d52c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.667125] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.667399] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1433.667619] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.297670] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "ca976c34-4eb0-46aa-a243-91401f842c32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.990161] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.990161] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1453.464733] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4c8e15e1-1d7d-4038-b10c-95a93fadfe8b tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Acquiring lock "de6aeec1-7138-4fac-ac3b-aaced21ef9da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.465093] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4c8e15e1-1d7d-4038-b10c-95a93fadfe8b tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "de6aeec1-7138-4fac-ac3b-aaced21ef9da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.944635] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e619623d-0ddf-4fa5-b0f4-5ac6117be62e tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "3ed17cef-5946-475d-994c-568aa7f83ea4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.944978] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e619623d-0ddf-4fa5-b0f4-5ac6117be62e tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "3ed17cef-5946-475d-994c-568aa7f83ea4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.049426] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1465.049742] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 1465.065612] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1465.065856] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.070105] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.070105] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1466.070105] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1466.070105] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1466.092259] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.092514] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.092655] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.092781] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.092903] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093032] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093198] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093269] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093383] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093497] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1466.093612] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1466.094111] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.049388] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.048899] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.049272] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.049313] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1469.049588] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.049971] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.048309] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1470.048595] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 1475.062032] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.074884] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.075133] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.075307] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.075466] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1475.077026] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba0008f-10f7-4179-b142-098b8ba6a7d0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.085987] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6245008-4e41-43c4-8633-2fa4b5212e49 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.101428] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d28857-e127-45eb-b371-5836bbd24074 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.108179] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422e9cfa-01b2-4bd5-8420-6715c2aa5da9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.137171] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180735MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1475.137305] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.137503] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.279834] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a7060037-2580-464a-b434-90ffe7314bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.279976] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280127] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280263] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280381] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280500] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280617] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280732] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280845] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.280959] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1475.295621] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.307996] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17199829-f72a-4ba6-93b4-da057f00bbc7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.319249] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance dcced840-b57b-47bd-8d7b-bfe971290659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.330040] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.342800] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 1a5de688-91c2-4197-a396-c0df71fdbeda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.353552] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c142b101-e8b0-4073-9079-5c9730eac176 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.363779] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.374179] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.385937] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.397913] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6aeec1-7138-4fac-ac3b-aaced21ef9da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.432830] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3ed17cef-5946-475d-994c-568aa7f83ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1475.433133] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1475.433295] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1475.450469] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1475.467677] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1475.467848] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1475.480649] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1475.500384] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1475.770171] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aed7a4-6840-43ff-bb0c-837d62fd4a88 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.778978] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f124d7fe-f7c6-4e0b-8579-90e478b68b9c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.810461] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e89997-c61a-4516-a750-aafb47b04f30 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.818743] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbbaf62-91f0-4a8c-ad83-9c0b04685d6d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.832128] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.843488] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.864749] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1475.865031] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.727s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.906023] env[68798]: WARNING oslo_vmware.rw_handles [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1476.906023] env[68798]: 
ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1476.906023] env[68798]: ERROR oslo_vmware.rw_handles [ 1476.906023] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1476.906023] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1476.906023] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Copying Virtual Disk [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/44c3dce2-292f-4e76-aee4-e4ad235b5e90/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1476.907257] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc95ea38-7345-4c26-a8f6-b12e60724dfe {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.916628] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for the task: (returnval){ [ 1476.916628] env[68798]: value = "task-4217647" [ 1476.916628] env[68798]: _type = "Task" [ 1476.916628] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.928417] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Task: {'id': task-4217647, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.428117] env[68798]: DEBUG oslo_vmware.exceptions [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1477.428420] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.429013] env[68798]: ERROR nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.429013] env[68798]: Faults: ['InvalidArgument'] [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] Traceback (most recent call last): [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] yield resources [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self.driver.spawn(context, instance, image_meta, [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self._fetch_image_if_missing(context, vi) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] image_cache(vi, tmp_image_ds_loc) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] vm_util.copy_virtual_disk( [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] session._wait_for_task(vmdk_copy_task) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return self.wait_for_task(task_ref) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return evt.wait() [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] result = hub.switch() [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return self.greenlet.switch() [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self.f(*self.args, **self.kw) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] raise exceptions.translate_fault(task_info.error) [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] Faults: ['InvalidArgument'] [ 1477.429013] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] [ 1477.429928] env[68798]: INFO nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Terminating instance [ 1477.431136] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.431219] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.431898] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: 
a7060037-2580-464a-b434-90ffe7314bd1] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1477.432186] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1477.432518] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-863d3bd8-6ac3-4d8a-bd1a-5b62014bf05b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.439029] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cf201e-b6f0-427d-b41c-c84d85855696 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.443373] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1477.443647] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d04030a-5171-4f31-b65b-e35ab954f52f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.447022] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.447022] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1477.447453] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-962bb80e-c369-4a76-b522-f09b4adfa799 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.453111] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for the task: (returnval){ [ 1477.453111] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a12a59-2e7c-309d-3bf3-69c4f29d9818" [ 1477.453111] env[68798]: _type = "Task" [ 1477.453111] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.463680] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a12a59-2e7c-309d-3bf3-69c4f29d9818, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.563509] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1477.563870] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1477.564141] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Deleting the datastore file [datastore1] a7060037-2580-464a-b434-90ffe7314bd1 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1477.564485] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-090156b0-d6b5-40fd-a62c-846b47e4a913 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.573076] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for the task: (returnval){ [ 1477.573076] env[68798]: value = "task-4217649" [ 1477.573076] env[68798]: _type = "Task" [ 1477.573076] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.581811] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Task: {'id': task-4217649, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.964177] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1477.964521] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Creating directory with path [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.964791] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42b96e7b-67ff-48f5-99df-2289f1e13982 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.978930] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Created directory with path [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.979162] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Fetch image to [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1477.979342] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1477.980228] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5ee00a-fd7e-4a5b-adde-216229f97de6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.988213] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf4ed7a-52c9-40c9-ad0d-5ab277d32e31 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.998643] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b35532-e38a-4a56-b108-7e898ef1d55d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.032730] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf3a640-dd03-4935-8831-a99a4d034288 
{{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.040064] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-226cb20a-07d6-4fb9-b758-e155ae50f401 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.064746] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1478.083945] env[68798]: DEBUG oslo_vmware.api [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Task: {'id': task-4217649, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094337} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.086129] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.086249] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1478.086441] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1478.086617] env[68798]: INFO nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Took 0.65 seconds to destroy the instance on the hypervisor. 
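[Editor's note] The records above and below repeatedly show the driver's invoke-then-wait pattern: a VI SDK task method (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is invoked, then polled by oslo_vmware.api (wait_for_task / _poll_task) until it reports success, as task-4217649 does, or its error is translated into a fault such as the VimFaultException ("A specified parameter was not correct: fileType") captured earlier. The sketch below is only an illustrative, pure-Python analogue of that polling loop, not the oslo.vmware implementation or the Nova driver code; the task object, its states, and the poll interval are hypothetical stand-ins.

# Illustrative analogue of the poll-until-done loop visible in this log
# (oslo_vmware.api wait_for_task / _poll_task); NOT the library's code.
# FakeTaskInfo, its states, and the interval are hypothetical stand-ins.
import time
from dataclasses import dataclass


@dataclass
class FakeTaskInfo:
    state: str          # 'running', 'success' or 'error'
    progress: int = 0
    error: str | None = None


class TaskFault(Exception):
    """Stands in for a translated fault (cf. VimFaultException in the log)."""


def wait_for_task(read_task_info, poll_interval=0.5, max_polls=120):
    """Poll a task until it succeeds; raise on reported error or timeout."""
    for _ in range(max_polls):
        info = read_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # analogous to _poll_task translating task_info.error into a fault
            raise TaskFault(info.error or 'task failed')
        time.sleep(poll_interval)
    raise TaskFault('timed out waiting for task')


if __name__ == '__main__':
    # Simulate a copy task that reports progress and then succeeds.
    states = iter([FakeTaskInfo('running', 0),
                   FakeTaskInfo('running', 50),
                   FakeTaskInfo('success', 100)])
    print(wait_for_task(lambda: next(states), poll_interval=0.01))

In the log itself, it is the real equivalent of this wait that raises the InvalidArgument fault and triggers the instance teardown and rescheduling recorded in the surrounding entries.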
[ 1478.090086] env[68798]: DEBUG nova.compute.claims [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1478.090266] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.090480] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.134890] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1478.195790] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1478.195933] env[68798]: DEBUG oslo_vmware.rw_handles [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1478.446478] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba59c971-843d-4d6b-963a-1bf3a4b135ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.454778] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bc1d0c-22d7-49c8-9e96-999670ed90db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.487655] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a8293d-b846-4d3e-9340-9676fded4ce3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.496339] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5814c34b-198c-4464-afce-938127291661 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.511577] env[68798]: DEBUG nova.compute.provider_tree [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.522616] env[68798]: DEBUG nova.scheduler.client.report [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1478.540274] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.450s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.540916] env[68798]: ERROR nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1478.540916] env[68798]: Faults: ['InvalidArgument'] [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] Traceback (most recent call last): [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1478.540916] env[68798]: ERROR nova.compute.manager 
[instance: a7060037-2580-464a-b434-90ffe7314bd1] self.driver.spawn(context, instance, image_meta, [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self._fetch_image_if_missing(context, vi) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] image_cache(vi, tmp_image_ds_loc) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] vm_util.copy_virtual_disk( [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] session._wait_for_task(vmdk_copy_task) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return self.wait_for_task(task_ref) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return evt.wait() [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] result = hub.switch() [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] return self.greenlet.switch() [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] self.f(*self.args, **self.kw) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] raise exceptions.translate_fault(task_info.error) [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] Faults: ['InvalidArgument'] [ 1478.540916] env[68798]: ERROR nova.compute.manager [instance: a7060037-2580-464a-b434-90ffe7314bd1] [ 1478.541867] env[68798]: DEBUG nova.compute.utils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1478.543876] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Build of instance a7060037-2580-464a-b434-90ffe7314bd1 was re-scheduled: A specified parameter was not correct: fileType [ 1478.543876] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1478.544292] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1478.544522] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1478.544754] env[68798]: DEBUG nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1478.544918] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1479.440185] env[68798]: DEBUG nova.network.neutron [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.459667] env[68798]: INFO nova.compute.manager [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Took 0.91 seconds to deallocate network for instance. [ 1479.602802] env[68798]: INFO nova.scheduler.client.report [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Deleted allocations for instance a7060037-2580-464a-b434-90ffe7314bd1 [ 1479.647018] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d7d25fee-9086-42e3-a90a-0cc13c2676c7 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.900s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.647464] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.879s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.647805] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "a7060037-2580-464a-b434-90ffe7314bd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.649268] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.649493] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.651808] env[68798]: INFO nova.compute.manager [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Terminating instance [ 1479.653566] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquiring lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.653725] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Acquired lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.653895] env[68798]: DEBUG nova.network.neutron [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1479.666603] env[68798]: DEBUG nova.compute.manager [None req-8ba38e10-c6ac-4617-acbe-fa6c10cc753b tempest-ServerActionsTestOtherA-140659728 tempest-ServerActionsTestOtherA-140659728-project-member] [instance: 6cf9a284-56a7-4780-b7a1-fedf77f8231a] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1479.690911] env[68798]: DEBUG nova.network.neutron [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1479.710902] env[68798]: DEBUG nova.compute.manager [None req-8ba38e10-c6ac-4617-acbe-fa6c10cc753b tempest-ServerActionsTestOtherA-140659728 tempest-ServerActionsTestOtherA-140659728-project-member] [instance: 6cf9a284-56a7-4780-b7a1-fedf77f8231a] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1479.737607] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8ba38e10-c6ac-4617-acbe-fa6c10cc753b tempest-ServerActionsTestOtherA-140659728 tempest-ServerActionsTestOtherA-140659728-project-member] Lock "6cf9a284-56a7-4780-b7a1-fedf77f8231a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.199s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.748445] env[68798]: DEBUG nova.compute.manager [None req-3aa5332d-f931-4dbf-b50f-23803f72ffb5 tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] [instance: 802f2573-8a44-489d-a0a0-32ca69dc6281] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1479.780501] env[68798]: DEBUG nova.compute.manager [None req-3aa5332d-f931-4dbf-b50f-23803f72ffb5 tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] [instance: 802f2573-8a44-489d-a0a0-32ca69dc6281] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1479.814035] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3aa5332d-f931-4dbf-b50f-23803f72ffb5 tempest-VolumesAdminNegativeTest-2011042876 tempest-VolumesAdminNegativeTest-2011042876-project-member] Lock "802f2573-8a44-489d-a0a0-32ca69dc6281" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.504s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.822727] env[68798]: DEBUG nova.network.neutron [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.824907] env[68798]: DEBUG nova.compute.manager [None req-b1df3437-ff84-4052-9da2-237214c20a03 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: 19702e1f-2d11-492c-9e9e-067d1aa2b6a4] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1479.832631] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Releasing lock "refresh_cache-a7060037-2580-464a-b434-90ffe7314bd1" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.833038] env[68798]: DEBUG nova.compute.manager [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1479.833290] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1479.833839] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-882b20da-bace-41cc-8e2c-25ccc8340c6a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.845406] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f032233a-29ee-4067-9e88-793da3be6688 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.859651] env[68798]: DEBUG nova.compute.manager [None req-b1df3437-ff84-4052-9da2-237214c20a03 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: 19702e1f-2d11-492c-9e9e-067d1aa2b6a4] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1479.883016] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7060037-2580-464a-b434-90ffe7314bd1 could not be found. [ 1479.883275] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1479.883498] env[68798]: INFO nova.compute.manager [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1479.883763] env[68798]: DEBUG oslo.service.loopingcall [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.886365] env[68798]: DEBUG nova.compute.manager [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1479.886491] env[68798]: DEBUG nova.network.neutron [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1479.898770] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b1df3437-ff84-4052-9da2-237214c20a03 tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "19702e1f-2d11-492c-9e9e-067d1aa2b6a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.884s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.908562] env[68798]: DEBUG nova.network.neutron [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1479.910896] env[68798]: DEBUG nova.compute.manager [None req-2dd0e14c-d042-44b2-8e8b-1f56e3c72134 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: e42bca43-6e9e-49d5-8cbd-4c57e5f0123b] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1479.919708] env[68798]: DEBUG nova.network.neutron [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.930736] env[68798]: INFO nova.compute.manager [-] [instance: a7060037-2580-464a-b434-90ffe7314bd1] Took 0.04 seconds to deallocate network for instance. [ 1479.948301] env[68798]: DEBUG nova.compute.manager [None req-2dd0e14c-d042-44b2-8e8b-1f56e3c72134 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: e42bca43-6e9e-49d5-8cbd-4c57e5f0123b] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1479.973597] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2dd0e14c-d042-44b2-8e8b-1f56e3c72134 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "e42bca43-6e9e-49d5-8cbd-4c57e5f0123b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.342s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.991043] env[68798]: DEBUG nova.compute.manager [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.024899] env[68798]: DEBUG nova.compute.manager [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1480.049976] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "de6bd0a0-27c9-4f6b-932f-d7fac5fd2e4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.101s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.054761] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d65cbdac-0e16-4465-83d7-1b6102f8ee07 tempest-ServersTestMultiNic-1784709761 tempest-ServersTestMultiNic-1784709761-project-member] Lock "a7060037-2580-464a-b434-90ffe7314bd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.407s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.058876] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "a7060037-2580-464a-b434-90ffe7314bd1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 304.548s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.059108] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a7060037-2580-464a-b434-90ffe7314bd1] During sync_power_state the instance has a pending task (deleting). Skip. [ 1480.059291] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "a7060037-2580-464a-b434-90ffe7314bd1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.060433] env[68798]: DEBUG nova.compute.manager [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: 17199829-f72a-4ba6-93b4-da057f00bbc7] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.085422] env[68798]: DEBUG nova.compute.manager [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] [instance: 17199829-f72a-4ba6-93b4-da057f00bbc7] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1480.108909] env[68798]: DEBUG oslo_concurrency.lockutils [None req-962934df-df48-4bb7-9d93-2c0b4ceee911 tempest-MultipleCreateTestJSON-252684274 tempest-MultipleCreateTestJSON-252684274-project-member] Lock "17199829-f72a-4ba6-93b4-da057f00bbc7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.129s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.118696] env[68798]: DEBUG nova.compute.manager [None req-6996578d-8fa5-4660-82e7-18a110b7045e tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] [instance: dcced840-b57b-47bd-8d7b-bfe971290659] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.150638] env[68798]: DEBUG nova.compute.manager [None req-6996578d-8fa5-4660-82e7-18a110b7045e tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] [instance: dcced840-b57b-47bd-8d7b-bfe971290659] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1480.178019] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6996578d-8fa5-4660-82e7-18a110b7045e tempest-AttachVolumeShelveTestJSON-1210845358 tempest-AttachVolumeShelveTestJSON-1210845358-project-member] Lock "dcced840-b57b-47bd-8d7b-bfe971290659" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.411s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.186305] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1480.255506] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.255776] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.257371] env[68798]: INFO nova.compute.claims [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.583879] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f363477-d629-4acf-a588-35bb323a915b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.592672] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cc0629-ef89-4822-bb82-612681d80f00 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.624838] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b994584-208d-4d9c-9e6f-27e205b0aa89 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.636018] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1e8614-0e84-4e3a-b86a-3d814f745bfc 
{{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.651351] env[68798]: DEBUG nova.compute.provider_tree [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1480.665086] env[68798]: DEBUG nova.scheduler.client.report [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.679306] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.423s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.679879] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1480.733792] env[68798]: DEBUG nova.compute.utils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1480.735310] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1480.735455] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1480.746291] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1480.827857] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1480.862959] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1480.863224] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1480.863387] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.863573] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1480.863743] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 
tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.863872] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1480.865698] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1480.865946] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1480.866307] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1480.866371] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1480.866515] env[68798]: DEBUG nova.virt.hardware [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1480.867733] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e6c30a-bc4b-440f-9d9c-59ae36ebecb6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.871651] env[68798]: DEBUG nova.policy [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '301bb8e1a97c432a92252091e830f815', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb153c1ecdcf4cb6b280665b504f5eab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1480.879709] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fed263c-d205-41c7-a683-57778400a6e4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.798795] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Successfully created port: 4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1482.825176] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Successfully updated port: 4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1482.841080] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1482.841080] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquired lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.841195] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1482.895419] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1483.191692] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Updating instance_info_cache with network_info: [{"id": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "address": "fa:16:3e:f1:1b:f8", "network": {"id": "43209d73-d9d2-49e9-90a2-621df2971dbb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-241836634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb153c1ecdcf4cb6b280665b504f5eab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a15d22a-88", "ovs_interfaceid": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.212768] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Releasing lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.213250] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance network_info: |[{"id": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "address": "fa:16:3e:f1:1b:f8", "network": {"id": "43209d73-d9d2-49e9-90a2-621df2971dbb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-241836634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb153c1ecdcf4cb6b280665b504f5eab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a15d22a-88", "ovs_interfaceid": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1483.214192] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:1b:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35fcdc55-dc29-451b-ad56-3a03b044dc81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a15d22a-8877-45d8-8c00-bce9e4de0fda', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1483.222370] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Creating folder: Project (cb153c1ecdcf4cb6b280665b504f5eab). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1483.223595] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa6dd536-e1b9-4356-ab5b-76d599961558 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.238927] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Created folder: Project (cb153c1ecdcf4cb6b280665b504f5eab) in parent group-v834492. [ 1483.239165] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Creating folder: Instances. Parent ref: group-v834573. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1483.239433] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d1183c6-0bec-4afc-90cc-a667276e45d0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.250556] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Created folder: Instances in parent group-v834573. [ 1483.250878] env[68798]: DEBUG oslo.service.loopingcall [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1483.251157] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1483.251419] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f74cdf31-36b5-42a8-9842-7d7c36fb14da {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.276164] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1483.276164] env[68798]: value = "task-4217652" [ 1483.276164] env[68798]: _type = "Task" [ 1483.276164] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.285389] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217652, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.520700] env[68798]: DEBUG nova.compute.manager [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Received event network-vif-plugged-4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1483.521052] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Acquiring lock "36980008-f639-4c88-afcf-0dba40420b87-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.521376] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Lock "36980008-f639-4c88-afcf-0dba40420b87-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.523562] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Lock "36980008-f639-4c88-afcf-0dba40420b87-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.523562] env[68798]: DEBUG nova.compute.manager [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] No waiting events found dispatching network-vif-plugged-4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1483.523562] env[68798]: WARNING nova.compute.manager [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Received unexpected event network-vif-plugged-4a15d22a-8877-45d8-8c00-bce9e4de0fda for instance with vm_state building and task_state spawning. 
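The CreateVM_Task records above (Invoking Folder.CreateVM_Task, then "Waiting for the task ... progress is 0%") follow the generic invoke-then-poll pattern oslo.vmware uses for vCenter tasks. A minimal, self-contained sketch of such a polling loop is shown below; it is illustrative only and assumes a hypothetical fetch_task_info() helper rather than the real oslo_vmware.api internals.

```python
# Illustrative sketch only (not the oslo.vmware implementation): the
# "Waiting for the task ... progress is 0% ... completed successfully"
# records in this log come from a poll loop conceptually like this one.
# fetch_task_info() and its returned 'state' values are assumed names.
import time

POLL_INTERVAL = 0.5  # seconds between polls; an assumed value


def wait_for_task(fetch_task_info, task_id, timeout=300.0):
    """Poll a task until it reaches a terminal state or times out.

    fetch_task_info(task_id) is assumed to return a dict with a
    'state' in {'queued', 'running', 'success', 'error'} and an
    optional 'error' message, mirroring what the log records report.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        time.sleep(POLL_INTERVAL)  # e.g. CreateVM_Task progress 0% -> 100%
    raise TimeoutError(f"task {task_id} did not complete in {timeout}s")
```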
[ 1483.523562] env[68798]: DEBUG nova.compute.manager [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Received event network-changed-4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1483.523562] env[68798]: DEBUG nova.compute.manager [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Refreshing instance network info cache due to event network-changed-4a15d22a-8877-45d8-8c00-bce9e4de0fda. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1483.523562] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Acquiring lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.525156] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Acquired lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.525905] env[68798]: DEBUG nova.network.neutron [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Refreshing network info cache for port 4a15d22a-8877-45d8-8c00-bce9e4de0fda {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1483.684730] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.684968] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.786572] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217652, 'name': CreateVM_Task, 'duration_secs': 0.329513} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.786773] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1483.787455] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.787619] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.788245] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1483.788503] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c345d89d-afb4-49b8-9928-675a19e85079 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.793895] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for the task: (returnval){ [ 1483.793895] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]520886b3-807c-b7f0-9bda-e4aae27ed6ae" [ 1483.793895] env[68798]: _type = "Task" [ 1483.793895] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.801945] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]520886b3-807c-b7f0-9bda-e4aae27ed6ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.072262] env[68798]: DEBUG nova.network.neutron [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Updated VIF entry in instance network info cache for port 4a15d22a-8877-45d8-8c00-bce9e4de0fda. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1484.072706] env[68798]: DEBUG nova.network.neutron [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Updating instance_info_cache with network_info: [{"id": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "address": "fa:16:3e:f1:1b:f8", "network": {"id": "43209d73-d9d2-49e9-90a2-621df2971dbb", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-241836634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb153c1ecdcf4cb6b280665b504f5eab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a15d22a-88", "ovs_interfaceid": "4a15d22a-8877-45d8-8c00-bce9e4de0fda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.086211] env[68798]: DEBUG oslo_concurrency.lockutils [req-2093cf5b-dba2-40d1-8be7-df4344e4d082 req-257f21ea-2ca1-48bc-886d-882a5c8d7183 service nova] Releasing lock "refresh_cache-36980008-f639-4c88-afcf-0dba40420b87" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.304737] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.305062] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.305294] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.049837] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "36980008-f639-4c88-afcf-0dba40420b87" 
by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.348567] env[68798]: DEBUG oslo_concurrency.lockutils [None req-af258cdd-553a-4a55-be18-bdd5b4780225 tempest-ServerPasswordTestJSON-123226599 tempest-ServerPasswordTestJSON-123226599-project-member] Acquiring lock "c0c535e6-e833-4b6a-870a-e1add9625765" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.348847] env[68798]: DEBUG oslo_concurrency.lockutils [None req-af258cdd-553a-4a55-be18-bdd5b4780225 tempest-ServerPasswordTestJSON-123226599 tempest-ServerPasswordTestJSON-123226599-project-member] Lock "c0c535e6-e833-4b6a-870a-e1add9625765" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.148202] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5b946144-b26d-41ff-b248-adf617e39ff4 tempest-ServerActionsV293TestJSON-878530011 tempest-ServerActionsV293TestJSON-878530011-project-member] Acquiring lock "31f33c3a-e089-4ed5-b60c-3c2d5f55a529" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.149047] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5b946144-b26d-41ff-b248-adf617e39ff4 tempest-ServerActionsV293TestJSON-878530011 tempest-ServerActionsV293TestJSON-878530011-project-member] Lock "31f33c3a-e089-4ed5-b60c-3c2d5f55a529" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.646070] env[68798]: WARNING oslo_vmware.rw_handles [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1525.646070] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 
1525.646070] env[68798]: ERROR oslo_vmware.rw_handles [ 1525.646942] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1525.648767] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1525.649042] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Copying Virtual Disk [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/58fa299f-190b-406f-972e-5552cf580d51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1525.649360] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-209e88e0-a89e-41d3-aaef-7f3bf345926b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.657146] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for the task: (returnval){ [ 1525.657146] env[68798]: value = "task-4217663" [ 1525.657146] env[68798]: _type = "Task" [ 1525.657146] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.666479] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Task: {'id': task-4217663, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.168026] env[68798]: DEBUG oslo_vmware.exceptions [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1526.168327] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.168927] env[68798]: ERROR nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.168927] env[68798]: Faults: ['InvalidArgument'] [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Traceback (most recent call last): [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] yield resources [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self.driver.spawn(context, instance, image_meta, [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self._fetch_image_if_missing(context, vi) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] image_cache(vi, tmp_image_ds_loc) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] vm_util.copy_virtual_disk( [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] session._wait_for_task(vmdk_copy_task) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return self.wait_for_task(task_ref) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return evt.wait() [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] result = hub.switch() [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return self.greenlet.switch() [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self.f(*self.args, **self.kw) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] raise exceptions.translate_fault(task_info.error) [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Faults: ['InvalidArgument'] [ 1526.168927] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] [ 1526.169968] env[68798]: INFO nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Terminating instance [ 1526.170919] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.171173] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.171789] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 
tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1526.171974] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1526.172223] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2b2ac75-714c-4ee9-b0b0-a7fa18da1168 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.174624] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da845cbd-02a2-4f0a-91c3-7c4d0917bc55 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.182594] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1526.182875] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3776dd2e-d505-49f3-be64-654d7ff7dc4f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.185542] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.185714] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1526.186730] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0817f17e-2ee0-4d80-a03f-eab16d4cdf63 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.192225] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1526.192225] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]522eb82b-8a0e-6260-84f0-05c988a82c07" [ 1526.192225] env[68798]: _type = "Task" [ 1526.192225] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.204515] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]522eb82b-8a0e-6260-84f0-05c988a82c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.264887] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1526.265127] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1526.265330] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Deleting the datastore file [datastore1] 1ae2e411-d8e4-4abb-8c7b-b907ebba094c {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1526.265628] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b11f336f-3483-4193-b1a1-172f6a52ce8f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.272623] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for the task: (returnval){ [ 1526.272623] env[68798]: value = "task-4217665" [ 1526.272623] env[68798]: _type = "Task" [ 1526.272623] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.281573] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Task: {'id': task-4217665, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.703486] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1526.703845] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating directory with path [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.703922] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d72ba5fc-203a-427b-8dd5-f1e10f5e1702 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.715919] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created directory with path [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.716127] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Fetch image to [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1526.716306] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1526.717111] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8cbde0-d6d4-4876-99f1-fd770f76a540 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.724695] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d846cc87-4076-41ab-98a0-3d25fa27fed6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.734035] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da7a853-2338-4d8c-bb22-cbb9a258ebd0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.765387] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e819a7-e72d-4919-a272-6cbdcc1a6f75 {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.772064] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-13846b15-64ea-4f48-af68-5dc4e36d7918 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.782103] env[68798]: DEBUG oslo_vmware.api [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Task: {'id': task-4217665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067859} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.782352] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1526.782537] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1526.782714] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1526.782892] env[68798]: INFO nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Took 0.61 seconds to destroy the instance on the hypervisor. 
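The lockutils records throughout this trace (Acquiring lock "..." by "..." :: waited N s, then "released" ... :: held N s) come from oslo.concurrency's lock wrapper, which logs how long a caller waited for and then held a named lock. A minimal stdlib sketch of that timing pattern follows; it is an illustration only, using a plain threading.Lock in place of the real lockutils implementation, and timed_lock() is a hypothetical name.

```python
# Illustrative sketch only: reproduces the "acquired ... waited Xs" /
# "released ... held Ys" timing reported in this log with a plain
# threading.Lock. The actual service uses oslo_concurrency.lockutils.
import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_locks = {}                      # name -> threading.Lock, created on first use
_locks_guard = threading.Lock()  # protects the _locks registry


@contextlib.contextmanager
def timed_lock(name, caller):
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        LOG.debug('Lock "%s" released by "%s" :: held %.3fs', name, caller, held)


# Usage mirroring the records below, e.g. the resource tracker's claim abort:
#   with timed_lock("compute_resources", "abort_instance_claim"):
#       ...critical section...
```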
[ 1526.785238] env[68798]: DEBUG nova.compute.claims [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1526.785444] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.785669] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.801411] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1526.847517] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1526.864725] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1526.927785] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1526.928092] env[68798]: DEBUG oslo_vmware.rw_handles [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1527.048984] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.049392] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1527.126571] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d143f8-2909-472c-a3e7-af9d041b61e0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.134350] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-574056ce-4426-47a0-88c6-32c07e055168 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.163872] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07677a6d-6b6a-4f0e-812d-3f02675ecb83 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.171244] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013633fb-faaf-40b6-9a9c-0d41486c74c1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.184432] env[68798]: DEBUG nova.compute.provider_tree [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1527.198060] env[68798]: DEBUG nova.scheduler.client.report [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.217380] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.431s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.217943] env[68798]: ERROR nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 
tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.217943] env[68798]: Faults: ['InvalidArgument'] [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Traceback (most recent call last): [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self.driver.spawn(context, instance, image_meta, [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self._fetch_image_if_missing(context, vi) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] image_cache(vi, tmp_image_ds_loc) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] vm_util.copy_virtual_disk( [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] session._wait_for_task(vmdk_copy_task) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return self.wait_for_task(task_ref) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return evt.wait() [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] result = hub.switch() [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] return self.greenlet.switch() [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] self.f(*self.args, **self.kw) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] raise exceptions.translate_fault(task_info.error) [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Faults: ['InvalidArgument'] [ 1527.217943] env[68798]: ERROR nova.compute.manager [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] [ 1527.218857] env[68798]: DEBUG nova.compute.utils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1527.220346] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Build of instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c was re-scheduled: A specified parameter was not correct: fileType [ 1527.220346] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1527.220753] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1527.220946] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1527.221177] env[68798]: DEBUG nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1527.221358] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1528.033264] env[68798]: DEBUG nova.network.neutron [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.045855] env[68798]: INFO nova.compute.manager [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Took 0.82 seconds to deallocate network for instance. [ 1528.049891] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.049891] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1528.049891] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1528.072724] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.072898] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073521] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073521] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073521] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073521] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073794] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073794] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073863] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1528.073992] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1528.075027] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.075027] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.075027] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1528.161144] env[68798]: INFO nova.scheduler.client.report [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Deleted allocations for instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c [ 1528.190943] env[68798]: DEBUG oslo_concurrency.lockutils [None req-a55eb0fc-6cf3-4e33-8547-33ccc1e1368f tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.759s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.191901] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.450s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.192156] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Acquiring lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.192362] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.192526] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.194630] env[68798]: INFO nova.compute.manager [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Terminating instance [ 1528.196260] env[68798]: DEBUG nova.compute.manager [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1528.196453] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1528.196908] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-774c89bf-31aa-469e-9a70-4ddc186146c9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.205540] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3fb9a2-afc5-4bbd-b57e-3974905ae7a9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.216305] env[68798]: DEBUG nova.compute.manager [None req-707a0910-729f-467a-832a-0b7cd7e429f1 tempest-InstanceActionsV221TestJSON-115434527 tempest-InstanceActionsV221TestJSON-115434527-project-member] [instance: 1a5de688-91c2-4197-a396-c0df71fdbeda] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1528.237409] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ae2e411-d8e4-4abb-8c7b-b907ebba094c could not be found. [ 1528.237627] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1528.237808] env[68798]: INFO nova.compute.manager [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1528.238073] env[68798]: DEBUG oslo.service.loopingcall [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1528.238338] env[68798]: DEBUG nova.compute.manager [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1528.238437] env[68798]: DEBUG nova.network.neutron [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1528.245410] env[68798]: DEBUG nova.compute.manager [None req-707a0910-729f-467a-832a-0b7cd7e429f1 tempest-InstanceActionsV221TestJSON-115434527 tempest-InstanceActionsV221TestJSON-115434527-project-member] [instance: 1a5de688-91c2-4197-a396-c0df71fdbeda] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1528.264101] env[68798]: DEBUG nova.network.neutron [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.270629] env[68798]: DEBUG oslo_concurrency.lockutils [None req-707a0910-729f-467a-832a-0b7cd7e429f1 tempest-InstanceActionsV221TestJSON-115434527 tempest-InstanceActionsV221TestJSON-115434527-project-member] Lock "1a5de688-91c2-4197-a396-c0df71fdbeda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.968s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.276080] env[68798]: INFO nova.compute.manager [-] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] Took 0.04 seconds to deallocate network for instance. [ 1528.282443] env[68798]: DEBUG nova.compute.manager [None req-d83a0c3d-3e8f-4aac-9666-cb3ba610c52f tempest-ServerShowV254Test-1493978018 tempest-ServerShowV254Test-1493978018-project-member] [instance: c142b101-e8b0-4073-9079-5c9730eac176] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1528.307671] env[68798]: DEBUG nova.compute.manager [None req-d83a0c3d-3e8f-4aac-9666-cb3ba610c52f tempest-ServerShowV254Test-1493978018 tempest-ServerShowV254Test-1493978018-project-member] [instance: c142b101-e8b0-4073-9079-5c9730eac176] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1528.332513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-d83a0c3d-3e8f-4aac-9666-cb3ba610c52f tempest-ServerShowV254Test-1493978018 tempest-ServerShowV254Test-1493978018-project-member] Lock "c142b101-e8b0-4073-9079-5c9730eac176" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.274s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.343214] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1528.408135] env[68798]: DEBUG oslo_concurrency.lockutils [None req-787d79de-1b68-4995-8e93-c460b395089e tempest-ServersTestManualDisk-2045641251 tempest-ServersTestManualDisk-2045641251-project-member] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.409088] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 352.898s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.409294] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 1ae2e411-d8e4-4abb-8c7b-b907ebba094c] During sync_power_state the instance has a pending task (deleting). Skip. [ 1528.409473] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "1ae2e411-d8e4-4abb-8c7b-b907ebba094c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.411545] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.411771] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.413374] env[68798]: INFO nova.compute.claims [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1528.697013] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4fab12-59e3-4216-a85f-1a0093173315 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.705438] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbf5789-73af-48c6-a08b-c5c78d3f39a1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.739633] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38ba4dc-70dd-4a32-bb32-17d28cca15c9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1528.748293] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc471db-6d31-4daa-afa4-a4a661e86048 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.762751] env[68798]: DEBUG nova.compute.provider_tree [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.771592] env[68798]: DEBUG nova.scheduler.client.report [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1528.785093] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.373s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.786025] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1528.830761] env[68798]: DEBUG nova.compute.utils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.832498] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1528.832707] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1528.843032] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1528.892196] env[68798]: DEBUG nova.policy [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc6caa6d7e4a49cf83311781e678d723', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5a4aaad5cff4665905ecbfb2adf895c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1528.917468] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1528.953080] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1528.953293] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1528.953426] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1528.953607] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1528.953755] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1528.953985] env[68798]: DEBUG nova.virt.hardware [None 
req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1528.954185] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1528.954350] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1528.954521] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1528.954714] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1528.954854] env[68798]: DEBUG nova.virt.hardware [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1528.956128] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cda0696-6cc7-4233-98c0-65220a2d2369 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.964953] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abe2b2d-a515-469b-a532-060ea196bcfe {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.048937] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.300192] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Successfully created port: d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1530.071512] env[68798]: DEBUG nova.compute.manager [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Received event 
network-vif-plugged-d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1530.071811] env[68798]: DEBUG oslo_concurrency.lockutils [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] Acquiring lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.071973] env[68798]: DEBUG oslo_concurrency.lockutils [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.072119] env[68798]: DEBUG oslo_concurrency.lockutils [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.072271] env[68798]: DEBUG nova.compute.manager [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] No waiting events found dispatching network-vif-plugged-d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1530.072434] env[68798]: WARNING nova.compute.manager [req-6a19b035-c313-46cb-8a1d-9e2b9e4b5b47 req-394b389d-0312-43bb-924f-bf2a8a9e0a87 service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Received unexpected event network-vif-plugged-d45549b8-70b9-4970-8bac-ded130667c0c for instance with vm_state building and task_state spawning. 
[ 1530.085499] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Successfully updated port: d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.101571] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.101829] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.101975] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1530.175324] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1530.401379] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Updating instance_info_cache with network_info: [{"id": "d45549b8-70b9-4970-8bac-ded130667c0c", "address": "fa:16:3e:a9:14:bc", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd45549b8-70", "ovs_interfaceid": "d45549b8-70b9-4970-8bac-ded130667c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.414187] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.414540] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance network_info: |[{"id": "d45549b8-70b9-4970-8bac-ded130667c0c", "address": "fa:16:3e:a9:14:bc", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd45549b8-70", "ovs_interfaceid": "d45549b8-70b9-4970-8bac-ded130667c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1530.414972] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:14:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd45549b8-70b9-4970-8bac-ded130667c0c', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.422881] env[68798]: DEBUG oslo.service.loopingcall [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.423579] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1530.423806] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4e9eee8-d634-4fb1-84ee-517b6cbef2b7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.446082] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.446082] env[68798]: value = "task-4217666" [ 1530.446082] env[68798]: _type = "Task" [ 1530.446082] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.455545] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217666, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.957675] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217666, 'name': CreateVM_Task, 'duration_secs': 0.34921} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.957877] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1530.958587] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.958757] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.959127] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.959494] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc8fc018-009f-4442-96a8-fd643d1ab517 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.965099] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1530.965099] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f11b3e-187a-a3c2-a5f6-a0e813c48bff" [ 1530.965099] env[68798]: _type = "Task" [ 1530.965099] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.974293] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f11b3e-187a-a3c2-a5f6-a0e813c48bff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.048156] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.478180] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.478557] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1531.478838] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.112243] env[68798]: DEBUG nova.compute.manager [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Received event network-changed-d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1532.112487] env[68798]: DEBUG nova.compute.manager [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Refreshing instance network info cache due to event network-changed-d45549b8-70b9-4970-8bac-ded130667c0c. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1532.112665] env[68798]: DEBUG oslo_concurrency.lockutils [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] Acquiring lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.112810] env[68798]: DEBUG oslo_concurrency.lockutils [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] Acquired lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.112972] env[68798]: DEBUG nova.network.neutron [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Refreshing network info cache for port d45549b8-70b9-4970-8bac-ded130667c0c {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1532.447208] env[68798]: DEBUG nova.network.neutron [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Updated VIF entry in instance network info cache for port d45549b8-70b9-4970-8bac-ded130667c0c. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1532.447596] env[68798]: DEBUG nova.network.neutron [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Updating instance_info_cache with network_info: [{"id": "d45549b8-70b9-4970-8bac-ded130667c0c", "address": "fa:16:3e:a9:14:bc", "network": {"id": "5bb583ce-8d7c-4a1a-83dd-e53b28a5bdcc", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715555607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5a4aaad5cff4665905ecbfb2adf895c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa0d7fd6-4fd3-4451-9208-8a3cf25e30e4", "external-id": "nsx-vlan-transportzone-545", "segmentation_id": 545, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd45549b8-70", "ovs_interfaceid": "d45549b8-70b9-4970-8bac-ded130667c0c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.458158] env[68798]: DEBUG oslo_concurrency.lockutils [req-42e982ce-11f7-4043-9e38-4e97b3840526 req-343237e1-b190-444d-bed4-29c3e40e9c0b service nova] Releasing lock "refresh_cache-b430775d-fcfb-4233-bc78-87d279e82fb5" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.044800] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.048342] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.061856] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.062097] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.062271] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.062429] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1536.063750] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5be1153-e10f-4b6e-8010-90751b1fc696 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.072469] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a380d493-7519-43c2-8a26-8eb883197ced {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.086460] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b2a792-ec20-47d0-a488-341c4124fcfb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.092623] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c98cd3-486f-443c-851a-fa45bb6c9a9c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.120720] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180689MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1536.120857] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.121063] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.206162] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206325] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206453] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206574] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206692] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206808] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.206924] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.207051] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.207169] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.207285] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1536.218517] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.229196] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.239947] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6aeec1-7138-4fac-ac3b-aaced21ef9da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.250051] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3ed17cef-5946-475d-994c-568aa7f83ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.259831] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.269729] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c0c535e6-e833-4b6a-870a-e1add9625765 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.279124] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 31f33c3a-e089-4ed5-b60c-3c2d5f55a529 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1536.279348] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1536.279492] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1536.480073] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c3adeb-a6bf-44ee-b73d-018b779715f5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.488295] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6744c86-b6f4-41c6-8219-11452c60227f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.517816] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41391dab-cec9-4a87-a6dd-f9c650545a99 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.525899] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea58fdf5-e534-49bf-b3cf-fcbb933e272f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.539471] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 
855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.549538] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.563810] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1536.563967] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.443s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.766838] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.767142] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.666802] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "b430775d-fcfb-4233-bc78-87d279e82fb5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.892963] env[68798]: WARNING oslo_vmware.rw_handles [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1572.892963] env[68798]: ERROR oslo_vmware.rw_handles [ 1572.893743] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1572.895766] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1572.896083] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Copying Virtual Disk [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/c8d4a2d4-2fbc-4690-9e36-398ee3aadeac/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1572.896411] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36302839-5436-433f-95ca-0b5aa97a0fcb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.904957] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1572.904957] env[68798]: value = "task-4217667" [ 1572.904957] env[68798]: _type = "Task" [ 1572.904957] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.913303] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217667, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.416347] env[68798]: DEBUG oslo_vmware.exceptions [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1573.416826] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.417487] env[68798]: ERROR nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.417487] env[68798]: Faults: ['InvalidArgument'] [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Traceback (most recent call last): [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] yield resources [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self.driver.spawn(context, instance, image_meta, [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self._fetch_image_if_missing(context, vi) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] image_cache(vi, tmp_image_ds_loc) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] vm_util.copy_virtual_disk( [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] session._wait_for_task(vmdk_copy_task) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return self.wait_for_task(task_ref) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return evt.wait() [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] result = hub.switch() [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return self.greenlet.switch() [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self.f(*self.args, **self.kw) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] raise exceptions.translate_fault(task_info.error) [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Faults: ['InvalidArgument'] [ 1573.417487] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] [ 1573.418690] env[68798]: INFO nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Terminating instance [ 1573.419561] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.419806] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 
tempest-ListServerFiltersTestJSON-698649794-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.420087] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a0fc708-fe1e-4667-8ff4-475a9111a577 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.422687] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1573.422918] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1573.423652] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112d4a3f-1d6d-4ac2-8c52-ea6e6cd972ff {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.431332] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1573.431589] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17529321-c348-4c79-917d-dd2ac3b62ede {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.434170] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.434350] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1573.435358] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d7a438-d8c3-4dc6-bedf-3e862de872bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.440412] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for the task: (returnval){ [ 1573.440412] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52994e55-5bcd-d4bc-8f8a-e61739534816" [ 1573.440412] env[68798]: _type = "Task" [ 1573.440412] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.450840] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52994e55-5bcd-d4bc-8f8a-e61739534816, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.502739] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1573.502981] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1573.503184] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleting the datastore file [datastore1] 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1573.503463] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2de4ad23-6639-4f7d-bfa3-835b7c5970ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.509692] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1573.509692] env[68798]: value = "task-4217669" [ 1573.509692] env[68798]: _type = "Task" [ 1573.509692] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.517963] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.951683] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1573.952070] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Creating directory with path [datastore1] vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.952377] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3e40955-c5d2-439f-b433-5a7228dbca5f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.964963] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Created directory with path [datastore1] vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.965187] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Fetch image to [datastore1] vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1573.965365] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1573.966201] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115809f1-17a7-4549-a116-ac1b37f15d5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.976066] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0a0f77-52e7-40b1-ab58-9856bdc88524 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.984749] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd47ab3-5ebf-4b85-b5c2-3d65db3a996c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.018457] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b5adff1d-df81-4930-9471-eadf3d99dc5b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.027345] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ce8a1e2e-56ff-4fe5-9402-b3ec17dba492 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.029070] env[68798]: DEBUG oslo_vmware.api [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069215} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.029311] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1574.029492] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1574.029663] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1574.029837] env[68798]: INFO nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Took 0.61 seconds to destroy the instance on the hypervisor. 
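The "Waiting for the task ... progress is 0% ... completed successfully" records above come from oslo.vmware polling a vCenter task (here task-4217669, DeleteDatastoreFile_Task, which finished with duration_secs 0.069215) until it reaches a terminal state. Below is a minimal sketch of such a poll-until-done loop; it is not the oslo.vmware implementation, and `FakeTask` and the 0.5 s poll interval are illustrative assumptions.

```python
# Simplified analogue of the poll-until-done loop reflected in the
# "Waiting for the task ... progress is N% ... completed successfully"
# records above. Not the oslo.vmware implementation; FakeTask and the
# 0.5 s interval are illustrative assumptions.
import time


class FakeTask:
    """Stand-in for a vCenter task handle (e.g. a DeleteDatastoreFile_Task)."""

    def __init__(self, total_polls=3):
        self._polls = 0
        self._total = total_polls

    def info(self):
        """Return (state, progress) the way a TaskInfo snapshot would."""
        self._polls += 1
        if self._polls >= self._total:
            return "success", 100
        return "running", int(100 * self._polls / self._total)


def wait_for_task(task, interval=0.5):
    """Poll a task until it reaches a terminal state, reporting progress."""
    start = time.monotonic()
    while True:
        state, progress = task.info()
        print(f"Task progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"completed successfully (duration_secs: {duration:.6f})")
            return
        if state == "error":
            # oslo.vmware would translate the fault into an exception here
            raise RuntimeError("task failed")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeTask())
```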
[ 1574.032112] env[68798]: DEBUG nova.compute.claims [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1574.032290] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.032502] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.054292] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1574.107028] env[68798]: DEBUG oslo_vmware.rw_handles [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1574.164162] env[68798]: DEBUG oslo_vmware.rw_handles [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1574.164357] env[68798]: DEBUG oslo_vmware.rw_handles [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1574.355863] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c72343-1141-4103-802c-9d75c73a3eaa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.364045] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7341ceef-ce6b-4cf3-8b01-9fc288892ec9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.395270] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629f4e1b-e6f1-4a57-804e-164d4b595c14 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.402896] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223b4a96-ad02-4351-94c9-dcc8afe054bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.416621] env[68798]: DEBUG nova.compute.provider_tree [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.426022] env[68798]: DEBUG nova.scheduler.client.report [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.439709] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.407s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.440207] env[68798]: ERROR nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1574.440207] env[68798]: Faults: ['InvalidArgument'] [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Traceback (most recent call last): [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1574.440207] env[68798]: ERROR 
nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self.driver.spawn(context, instance, image_meta, [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self._fetch_image_if_missing(context, vi) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] image_cache(vi, tmp_image_ds_loc) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] vm_util.copy_virtual_disk( [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] session._wait_for_task(vmdk_copy_task) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return self.wait_for_task(task_ref) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return evt.wait() [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] result = hub.switch() [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] return self.greenlet.switch() [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] self.f(*self.args, **self.kw) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] raise exceptions.translate_fault(task_info.error) [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Faults: ['InvalidArgument'] [ 1574.440207] env[68798]: ERROR nova.compute.manager [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] [ 1574.440969] env[68798]: DEBUG nova.compute.utils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1574.444236] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Build of instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa was re-scheduled: A specified parameter was not correct: fileType [ 1574.444236] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1574.444677] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1574.444905] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1574.445105] env[68798]: DEBUG nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1574.445277] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1574.779327] env[68798]: DEBUG nova.network.neutron [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.795548] env[68798]: INFO nova.compute.manager [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Took 0.35 seconds to deallocate network for instance. [ 1574.925350] env[68798]: INFO nova.scheduler.client.report [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleted allocations for instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa [ 1574.948847] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f94b8a46-ba0e-468b-99bb-8afcad39e638 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 622.101s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.950196] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.948s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.950449] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.950708] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.951275] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.956812] env[68798]: INFO nova.compute.manager [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Terminating instance [ 1574.958825] env[68798]: DEBUG nova.compute.manager [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1574.959107] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1574.959392] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c156649-4d66-4bae-af7d-2aa3a2740248 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.964588] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1574.971389] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7407198-9f69-4ffe-8fbc-d1d92f2d8c17 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.002411] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa could not be found. [ 1575.002645] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1575.002834] env[68798]: INFO nova.compute.manager [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Took 0.04 seconds to destroy the instance on the hypervisor. 
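The oslo_concurrency.lockutils records above report how long each caller waited for the per-instance lock and how long it held it (e.g. waited 425.948s and held 622.101s around instance 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa). Below is a rough stdlib analogue of that waited/held bookkeeping; it is not the lockutils implementation, and the `synchronized` decorator and lock registry are illustrative stand-ins.

```python
# Rough stdlib analogue of the "acquired ... waited Xs" / "released ... held Xs"
# bookkeeping seen in the oslo_concurrency.lockutils records above.
# Not the lockutils implementation; `synchronized` and `_locks` are stand-ins.
import threading
import time
from collections import defaultdict
from functools import wraps

# One named lock per resource, e.g. an instance UUID.
_locks = defaultdict(threading.Lock)


def synchronized(name):
    def decorator(fn):
        @wraps(fn)
        def inner(*args, **kwargs):
            lock = _locks[name]
            t0 = time.monotonic()
            with lock:
                waited = time.monotonic() - t0
                print(f'Lock "{name}" acquired by "{fn.__name__}" :: waited {waited:.3f}s')
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    held = time.monotonic() - t1
                    print(f'Lock "{name}" "released" by "{fn.__name__}" :: held {held:.3f}s')
        return inner
    return decorator


@synchronized("5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa")
def do_terminate_instance():
    time.sleep(0.2)  # placeholder for the actual teardown work


if __name__ == "__main__":
    do_terminate_instance()
```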
[ 1575.003147] env[68798]: DEBUG oslo.service.loopingcall [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.005768] env[68798]: DEBUG nova.compute.manager [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1575.005871] env[68798]: DEBUG nova.network.neutron [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1575.020272] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.020521] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.022032] env[68798]: INFO nova.compute.claims [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1575.035765] env[68798]: DEBUG nova.network.neutron [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.056429] env[68798]: INFO nova.compute.manager [-] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] Took 0.05 seconds to deallocate network for instance. 
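The inventory logged for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 (total, reserved, max_unit, allocation_ratio per resource class) is what claims such as the one above for instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 are checked against. A small worked sketch, assuming the standard placement capacity formula capacity = (total - reserved) * allocation_ratio and the per-instance request sizes seen in these records:

```python
# Capacity implied by the inventory logged for provider
# 855bb535-a51f-4f9d-8f32-8a3291b17319, assuming the standard placement
# formula: capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

# Per-instance request matching the m1.nano claims in these records.
request = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g} -> room for {int(capacity // request[rc])} "
          f"requests of {request[rc]}")

# VCPU: (48 - 0) * 4.0 = 192, MEMORY_MB: (196590 - 512) * 1.0 = 196078,
# DISK_GB: (400 - 0) * 1.0 = 400.  Note that max_unit (16 VCPU, 65530 MB,
# 3 GB in the logged inventory) still caps any single allocation.
```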
[ 1575.154182] env[68798]: DEBUG oslo_concurrency.lockutils [None req-532044d4-89fe-4171-88c2-c8adf46a3f4f tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.155586] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 399.644s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.155929] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa] During sync_power_state the instance has a pending task (deleting). Skip. [ 1575.156249] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "5bd5df0f-56d5-4d85-ae37-2c25f4cb7afa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.301331] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed069f9-7feb-4453-8ba0-5740f1ef0c12 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.309330] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb95525-c7c8-43e4-9c26-981c58f297df {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.340060] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5e6bcb-2075-45a6-9786-bae411f8720a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.348369] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4783a38d-6626-4b2b-9b07-698da5377d5d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.362238] env[68798]: DEBUG nova.compute.provider_tree [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1575.370836] env[68798]: DEBUG nova.scheduler.client.report [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1575.385052] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.364s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.385554] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1575.419222] env[68798]: DEBUG nova.compute.utils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.420675] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1575.420864] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1575.429610] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1575.479372] env[68798]: DEBUG nova.policy [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bfb0d578c564773adf00485f02a2961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6112f6c97d9d43a3bdd81bae8026a3a7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1575.501227] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1575.526497] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1575.526777] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1575.526936] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1575.527079] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1575.527233] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1575.527379] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1575.527590] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1575.527751] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1575.527921] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1575.528096] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1575.528275] env[68798]: DEBUG nova.virt.hardware [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1575.529139] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a91fc96-a220-46b5-b74d-2b9c7300dd79 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.539161] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280235bf-68a0-4a18-9590-3b5d23d665c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.814916] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Successfully created port: 043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.566202] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Successfully updated port: 043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1576.584139] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.584139] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquired lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.584139] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1576.632694] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1576.822225] env[68798]: DEBUG nova.compute.manager [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Received event network-vif-plugged-043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1576.822225] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Acquiring lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.823220] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.824028] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.824028] env[68798]: DEBUG nova.compute.manager [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] No waiting events found dispatching network-vif-plugged-043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1576.824028] env[68798]: WARNING nova.compute.manager [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Received unexpected event network-vif-plugged-043d940d-4b0e-49c3-94ad-09c9216d7a54 for instance with vm_state building and task_state spawning. [ 1576.824219] env[68798]: DEBUG nova.compute.manager [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Received event network-changed-043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1576.824386] env[68798]: DEBUG nova.compute.manager [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Refreshing instance network info cache due to event network-changed-043d940d-4b0e-49c3-94ad-09c9216d7a54. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1576.824598] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Acquiring lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.825637] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Updating instance_info_cache with network_info: [{"id": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "address": "fa:16:3e:8b:82:99", "network": {"id": "e11eccfe-7db3-450d-87ec-b7a6a1af76f1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-61648427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6112f6c97d9d43a3bdd81bae8026a3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043d940d-4b", "ovs_interfaceid": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.837397] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Releasing lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.837751] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 
tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance network_info: |[{"id": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "address": "fa:16:3e:8b:82:99", "network": {"id": "e11eccfe-7db3-450d-87ec-b7a6a1af76f1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-61648427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6112f6c97d9d43a3bdd81bae8026a3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043d940d-4b", "ovs_interfaceid": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1576.838085] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Acquired lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.838274] env[68798]: DEBUG nova.network.neutron [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Refreshing network info cache for port 043d940d-4b0e-49c3-94ad-09c9216d7a54 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1576.839325] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:82:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3093647a-bab7-4562-ada0-428725e8c0fc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '043d940d-4b0e-49c3-94ad-09c9216d7a54', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.846636] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Creating folder: Project (6112f6c97d9d43a3bdd81bae8026a3a7). Parent ref: group-v834492. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1576.849710] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c4282f9-d248-4d10-9d80-10245f0ea733 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.861060] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Created folder: Project (6112f6c97d9d43a3bdd81bae8026a3a7) in parent group-v834492. [ 1576.861060] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Creating folder: Instances. Parent ref: group-v834581. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1576.861060] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87d09a3d-fd9e-49d8-a6c2-4b0535746a51 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.870767] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Created folder: Instances in parent group-v834581. [ 1576.871019] env[68798]: DEBUG oslo.service.loopingcall [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.871211] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1576.871417] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53f65110-a7e0-4b46-bba6-05ddc74b21bc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.892420] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.892420] env[68798]: value = "task-4217672" [ 1576.892420] env[68798]: _type = "Task" [ 1576.892420] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.900565] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217672, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.101088] env[68798]: DEBUG nova.network.neutron [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Updated VIF entry in instance network info cache for port 043d940d-4b0e-49c3-94ad-09c9216d7a54. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1577.101518] env[68798]: DEBUG nova.network.neutron [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Updating instance_info_cache with network_info: [{"id": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "address": "fa:16:3e:8b:82:99", "network": {"id": "e11eccfe-7db3-450d-87ec-b7a6a1af76f1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-61648427-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6112f6c97d9d43a3bdd81bae8026a3a7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3093647a-bab7-4562-ada0-428725e8c0fc", "external-id": "nsx-vlan-transportzone-660", "segmentation_id": 660, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043d940d-4b", "ovs_interfaceid": "043d940d-4b0e-49c3-94ad-09c9216d7a54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.114975] env[68798]: DEBUG oslo_concurrency.lockutils [req-34ba8747-9ffa-4f39-8041-daba4a6e9e32 req-70c007e5-7b5f-4ce1-a7d0-3aff5dc2db97 service nova] Releasing lock "refresh_cache-71c99eda-d55d-4d60-92d2-a5553c3c3760" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.403774] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217672, 'name': CreateVM_Task, 'duration_secs': 0.318562} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.403967] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1577.404694] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.404816] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.405167] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.405418] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60bfbf08-f658-4b64-b12e-788cf7171d41 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.410054] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for the task: (returnval){ [ 1577.410054] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]521364ee-73c7-0ced-5a70-b91f5b20d246" [ 1577.410054] env[68798]: _type = "Task" [ 1577.410054] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.418347] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]521364ee-73c7-0ced-5a70-b91f5b20d246, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.922753] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.923198] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.923246] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.559695] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.560126] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.048134] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.048397] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1589.048460] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1589.071351] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.071612] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.071612] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.071744] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.071868] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.072022] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.072149] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.072273] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.072397] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.073123] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1589.073123] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1589.073418] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.073418] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.073481] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.073610] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1589.073730] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1590.039231] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.049294] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.048085] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.060778] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.061008] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.061189] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.061351] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1598.062576] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c17e8bc-68c9-490f-8219-ec3db38dbd5a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.071443] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2caf0de5-3377-474c-a357-5c2ee424fcd4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.085674] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f99515-2e86-4a35-af1e-7345534f4ec2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.092136] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f3576c-1fec-40b2-820d-094d4a3a2c35 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.123153] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180747MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1598.123303] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.123501] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.200034] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5e53196f-984a-4d72-8e00-861ef0751dca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200207] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200337] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200458] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200576] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200691] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200807] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.200921] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.201047] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.201175] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1598.213068] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.223686] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de6aeec1-7138-4fac-ac3b-aaced21ef9da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.234862] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 3ed17cef-5946-475d-994c-568aa7f83ea4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.245186] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.254774] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c0c535e6-e833-4b6a-870a-e1add9625765 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.264208] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 31f33c3a-e089-4ed5-b60c-3c2d5f55a529 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.273668] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1598.273892] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1598.274094] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1598.481948] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efcba571-a4db-4160-9b5d-a29527a4f186 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.489901] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff4e0b9-1500-4d2d-bf48-0329626c85a2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.519502] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9f0451-ba72-4abc-8e5d-c6a20ffe1755 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.526964] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfb6314-1727-43b7-87c3-eb0e8230c461 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.541186] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.550528] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1598.565875] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1598.566087] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.443s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.265051] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "6f0e769a-33db-48c6-9a88-cceb310cb819" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.265511] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.681589] env[68798]: WARNING oslo_vmware.rw_handles [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1620.681589] env[68798]: ERROR oslo_vmware.rw_handles [ 1620.682360] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1620.684238] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1620.684602] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/75f483d6-ccae-4174-a4ea-cdcf43d3152f/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1620.684896] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9ab61e1-3cdb-46fb-8121-a3e8e0088fcc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.694892] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for the task: (returnval){ [ 1620.694892] env[68798]: value = "task-4217673" [ 1620.694892] env[68798]: _type = "Task" [ 1620.694892] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.703652] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Task: {'id': task-4217673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.205946] env[68798]: DEBUG oslo_vmware.exceptions [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1621.206268] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.206814] env[68798]: ERROR nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1621.206814] env[68798]: Faults: ['InvalidArgument'] [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Traceback (most recent call last): [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] yield resources [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self.driver.spawn(context, instance, image_meta, [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 
5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self._fetch_image_if_missing(context, vi) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] image_cache(vi, tmp_image_ds_loc) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] vm_util.copy_virtual_disk( [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] session._wait_for_task(vmdk_copy_task) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return self.wait_for_task(task_ref) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return evt.wait() [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] result = hub.switch() [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return self.greenlet.switch() [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self.f(*self.args, **self.kw) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] raise 
exceptions.translate_fault(task_info.error) [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Faults: ['InvalidArgument'] [ 1621.206814] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] [ 1621.207902] env[68798]: INFO nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Terminating instance [ 1621.208742] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.208951] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.209617] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1621.209807] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1621.210060] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4ad53c1-7d5c-4b95-813e-e6baae14790b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.212666] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9cf587-d7d6-4097-be41-7d9299285b33 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.220140] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1621.220355] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b50f5ee-627d-4518-af61-ceae13de80bc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.222754] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.222930] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1621.223938] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-748db5a8-73aa-4fd4-9ad1-a7221b51db91 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.229198] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for the task: (returnval){ [ 1621.229198] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5283776f-22bc-92e7-4ab6-3737ccadd4ba" [ 1621.229198] env[68798]: _type = "Task" [ 1621.229198] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.236702] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5283776f-22bc-92e7-4ab6-3737ccadd4ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.302022] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1621.302296] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1621.302517] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Deleting the datastore file [datastore1] 5e53196f-984a-4d72-8e00-861ef0751dca {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.302828] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a661bfd0-0932-41be-83bf-3ce0cf6d1ff4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.310012] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for the task: (returnval){ [ 1621.310012] env[68798]: value = "task-4217675" [ 1621.310012] env[68798]: _type = "Task" [ 1621.310012] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.318720] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Task: {'id': task-4217675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.741120] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1621.741120] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Creating directory with path [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1621.741120] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bfd5ecd-c82e-441d-b76e-5450e2a3d1ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.752766] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Created directory with path [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1621.752959] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Fetch image to [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1621.753143] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1621.754015] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad824cce-2fca-483c-a4d4-9262373dd1ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.762175] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccd6bad-9ac5-4c7f-8fa2-e1ae15dbaf61 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.771724] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03063d02-2b4d-4c0e-bcd1-9ae96c730576 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.803272] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9513fac-e801-475e-b5c1-dc58ffd368bb {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.809748] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6888e25f-dcc1-4dd9-aef5-a0119e64c925 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.819733] env[68798]: DEBUG oslo_vmware.api [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Task: {'id': task-4217675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070791} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.819979] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.820173] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1621.820353] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1621.820531] env[68798]: INFO nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Took 0.61 seconds to destroy the instance on the hypervisor. 
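The "Waiting for the task ... to complete", "progress is 0%" and "completed successfully" entries above come from the driver's task-polling pattern: a vCenter task is submitted (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) and then polled until it reaches a terminal state, with errors raised as translated faults, which is where the InvalidArgument/fileType exception in the traceback surfaces. The sketch below is a minimal, illustrative version of that loop, not the oslo.vmware implementation; FakeTaskInfo is invented here only so the example runs.

    import itertools
    import time


    class FakeTaskInfo:
        """Simulated vCenter TaskInfo: two 'running' polls, then 'error', like the failed copy above."""

        def __init__(self):
            self._states = itertools.chain(["running", "running"], itertools.repeat("error"))
            self.error = "A specified parameter was not correct: fileType"

        @property
        def state(self):
            return next(self._states)


    def wait_for_task(task_info, poll_interval=0.1):
        """Poll a task until it reaches a terminal state, raising on error states."""
        while True:
            state = task_info.state
            if state == "success":
                return task_info
            if state == "error":
                # Stands in for exceptions.translate_fault(); this is where faults
                # such as InvalidArgument become Python exceptions.
                raise RuntimeError(task_info.error)
            time.sleep(poll_interval)


    if __name__ == "__main__":
        try:
            wait_for_task(FakeTaskInfo())
        except RuntimeError as exc:
            print("task failed:", exc)
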
[ 1621.822969] env[68798]: DEBUG nova.compute.claims [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1621.823254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.823482] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.835887] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1621.892130] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1621.950857] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1621.951071] env[68798]: DEBUG oslo_vmware.rw_handles [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1622.143061] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617603e4-1c1f-4d49-8326-78828e5ed783 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.153047] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50866b43-4f0b-4b2b-b5ae-cba79e7c50c5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.183217] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61852aa1-801e-4afd-b3fb-83ec14b371d5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.190951] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebbf4da-5474-47ee-9762-fc130c219790 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.204292] env[68798]: DEBUG nova.compute.provider_tree [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.213565] env[68798]: DEBUG nova.scheduler.client.report [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1622.228726] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.405s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.229301] env[68798]: ERROR nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.229301] env[68798]: Faults: ['InvalidArgument'] [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Traceback (most recent call last): [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1622.229301] 
env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self.driver.spawn(context, instance, image_meta, [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self._fetch_image_if_missing(context, vi) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] image_cache(vi, tmp_image_ds_loc) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] vm_util.copy_virtual_disk( [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] session._wait_for_task(vmdk_copy_task) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return self.wait_for_task(task_ref) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return evt.wait() [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] result = hub.switch() [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] return self.greenlet.switch() [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] self.f(*self.args, **self.kw) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] raise exceptions.translate_fault(task_info.error) [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Faults: ['InvalidArgument'] [ 1622.229301] env[68798]: ERROR nova.compute.manager [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] [ 1622.230099] env[68798]: DEBUG nova.compute.utils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1622.231922] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Build of instance 5e53196f-984a-4d72-8e00-861ef0751dca was re-scheduled: A specified parameter was not correct: fileType [ 1622.231922] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1622.232327] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1622.232501] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1622.232676] env[68798]: DEBUG nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1622.232837] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1622.739018] env[68798]: DEBUG nova.network.neutron [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.747732] env[68798]: INFO nova.compute.manager [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Took 0.51 seconds to deallocate network for instance. [ 1622.861589] env[68798]: INFO nova.scheduler.client.report [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Deleted allocations for instance 5e53196f-984a-4d72-8e00-861ef0751dca [ 1622.881362] env[68798]: DEBUG oslo_concurrency.lockutils [None req-95ec64b3-2b1f-439f-af55-c839c735799c tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 626.828s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.882518] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.579s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.882744] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Acquiring lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.882950] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.883144] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.885983] env[68798]: INFO nova.compute.manager [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Terminating instance [ 1622.888388] env[68798]: DEBUG nova.compute.manager [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1622.888388] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1622.888632] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0d30bd9-c25b-4b49-ab9f-ff90fd610b64 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.898738] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5931bbff-b4c5-450a-955f-5484266c176e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.909535] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1622.931950] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e53196f-984a-4d72-8e00-861ef0751dca could not be found. 
[ 1622.932185] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1622.932369] env[68798]: INFO nova.compute.manager [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1622.932626] env[68798]: DEBUG oslo.service.loopingcall [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1622.932854] env[68798]: DEBUG nova.compute.manager [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1622.932950] env[68798]: DEBUG nova.network.neutron [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1622.959244] env[68798]: DEBUG nova.network.neutron [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.960781] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.961021] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.962669] env[68798]: INFO nova.compute.claims [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1622.970027] env[68798]: INFO nova.compute.manager [-] [instance: 5e53196f-984a-4d72-8e00-861ef0751dca] Took 0.04 seconds to deallocate network for instance. 
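Most of the "Acquiring lock" / "acquired" / "released" entries in this section come from oslo.concurrency's lockutils, keyed either by instance UUID or by the shared "compute_resources" name used by the resource tracker. A small usage sketch of that public API follows, assuming oslo.concurrency is installed; the work done inside the locks is hypothetical and heavily simplified.

    from oslo_concurrency import lockutils


    def abort_instance_claim(instance_uuid):
        # Shared resource-tracker serialization, like the Lock "compute_resources" entries.
        with lockutils.lock("compute_resources"):
            # ... hypothetical: return the instance's CPU/RAM/disk claim to the pool ...
            pass


    def do_terminate_instance(instance_uuid):
        # Per-instance serialization, like the Lock "5e53196f-..." entries above.
        with lockutils.lock(instance_uuid):
            abort_instance_claim(instance_uuid)
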
[ 1623.075806] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cc6fee12-b163-469a-9a92-fd8b19db0c0e tempest-ListServerFiltersTestJSON-698649794 tempest-ListServerFiltersTestJSON-698649794-project-member] Lock "5e53196f-984a-4d72-8e00-861ef0751dca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.193s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.232297] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9972c887-69a4-46f8-9a8c-1127e087f994 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.240463] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd266b8-029a-4df9-89f7-8a11cacc3929 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.271100] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2214d9a3-eb1c-4d57-b047-cae3d55eb9e9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.279302] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55796414-5818-453d-83af-7611c380d0a5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.292855] env[68798]: DEBUG nova.compute.provider_tree [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.301882] env[68798]: DEBUG nova.scheduler.client.report [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1623.319093] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.358s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.319599] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1623.354884] env[68798]: DEBUG nova.compute.utils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1623.356525] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1623.356699] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1623.366356] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1623.424208] env[68798]: DEBUG nova.policy [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e8eeed2608b439ba6f850309088e05d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28b733a1dd73485d903fed78f0e13450', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1623.439960] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1623.467023] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1623.467210] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1623.467460] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1623.467853] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1623.468015] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1623.468300] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1623.468370] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1623.468537] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1623.468770] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1623.468965] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1623.469171] env[68798]: DEBUG nova.virt.hardware [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1623.470069] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51440e47-ef35-477e-b811-98007c074163 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.480134] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b458514d-3f88-4c0a-99b0-ae2650c7aa27 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.841947] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Successfully created port: 043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.716829] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Successfully updated port: 043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.730097] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.730256] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquired lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.730409] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: 
a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1624.808084] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1624.876568] env[68798]: DEBUG nova.compute.manager [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Received event network-vif-plugged-043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1624.876920] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Acquiring lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.877322] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.877530] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.877682] env[68798]: DEBUG nova.compute.manager [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] No waiting events found dispatching network-vif-plugged-043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.877858] env[68798]: WARNING nova.compute.manager [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Received unexpected event network-vif-plugged-043b1210-36a2-4596-8a76-16e901f3a675 for instance with vm_state building and task_state spawning. [ 1624.878030] env[68798]: DEBUG nova.compute.manager [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Received event network-changed-043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1624.878195] env[68798]: DEBUG nova.compute.manager [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Refreshing instance network info cache due to event network-changed-043b1210-36a2-4596-8a76-16e901f3a675. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1624.878363] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Acquiring lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.050786] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Updating instance_info_cache with network_info: [{"id": "043b1210-36a2-4596-8a76-16e901f3a675", "address": "fa:16:3e:63:19:a8", "network": {"id": "9cb680c5-3d82-4b26-a69f-90c74e2df0f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1040215090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28b733a1dd73485d903fed78f0e13450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043b1210-36", "ovs_interfaceid": "043b1210-36a2-4596-8a76-16e901f3a675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.061789] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Releasing lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.062128] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance network_info: |[{"id": "043b1210-36a2-4596-8a76-16e901f3a675", "address": "fa:16:3e:63:19:a8", "network": {"id": "9cb680c5-3d82-4b26-a69f-90c74e2df0f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1040215090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28b733a1dd73485d903fed78f0e13450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", 
"external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043b1210-36", "ovs_interfaceid": "043b1210-36a2-4596-8a76-16e901f3a675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1625.062495] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Acquired lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.062675] env[68798]: DEBUG nova.network.neutron [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Refreshing network info cache for port 043b1210-36a2-4596-8a76-16e901f3a675 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1625.063803] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:19:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74f30339-6421-4654-bddb-81d7f34db9d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '043b1210-36a2-4596-8a76-16e901f3a675', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.072387] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Creating folder: Project (28b733a1dd73485d903fed78f0e13450). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.073380] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b18811b-5142-4eff-afc9-0c1e6ffa7db6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.087994] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Created folder: Project (28b733a1dd73485d903fed78f0e13450) in parent group-v834492. [ 1625.088226] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Creating folder: Instances. Parent ref: group-v834584. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1625.088444] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50e7aa64-1def-44c7-bdc1-f96a0dec9a16 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.097970] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Created folder: Instances in parent group-v834584. [ 1625.098224] env[68798]: DEBUG oslo.service.loopingcall [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.098417] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1625.098717] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e1a7757-7550-4231-8bb8-92261e4f5739 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.119712] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.119712] env[68798]: value = "task-4217678" [ 1625.119712] env[68798]: _type = "Task" [ 1625.119712] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.131199] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217678, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.343549] env[68798]: DEBUG nova.network.neutron [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Updated VIF entry in instance network info cache for port 043b1210-36a2-4596-8a76-16e901f3a675. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1625.343945] env[68798]: DEBUG nova.network.neutron [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Updating instance_info_cache with network_info: [{"id": "043b1210-36a2-4596-8a76-16e901f3a675", "address": "fa:16:3e:63:19:a8", "network": {"id": "9cb680c5-3d82-4b26-a69f-90c74e2df0f0", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1040215090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28b733a1dd73485d903fed78f0e13450", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74f30339-6421-4654-bddb-81d7f34db9d7", "external-id": "nsx-vlan-transportzone-899", "segmentation_id": 899, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043b1210-36", "ovs_interfaceid": "043b1210-36a2-4596-8a76-16e901f3a675", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.354933] env[68798]: DEBUG oslo_concurrency.lockutils [req-ea9a2b66-ecb6-4211-9e17-5339147dcc67 req-427d43d2-352b-4de3-89ef-1ae1579d6cb6 service nova] Releasing lock "refresh_cache-a4e41ed1-2b39-4475-bd13-1680ff46ff6f" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.629456] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217678, 'name': CreateVM_Task, 'duration_secs': 0.335122} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.629666] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1625.630364] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.630532] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.630847] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.631115] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f05aff3-a8a1-4fd0-94b9-4018bb1b498b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.635939] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for the task: (returnval){ [ 1625.635939] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5208ac01-4331-7f21-755f-be866cf8673d" [ 1625.635939] env[68798]: _type = "Task" [ 1625.635939] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.643960] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5208ac01-4331-7f21-755f-be866cf8673d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.146868] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.146868] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1626.146868] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.210521] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "17cce398-d2f8-47a6-b714-c4e54caec516" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.724690] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.567857] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.568218] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.568299] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.044657] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.048450] env[68798]: DEBUG 
oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.048615] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1650.048753] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1650.071677] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.071872] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072016] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072098] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072218] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072386] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072471] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072578] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072837] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.072985] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1650.073120] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1650.073635] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.073819] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1650.073954] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1652.650301] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "ce408b93-3713-4819-8c80-63735d9a5467" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.650528] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.049048] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.043953] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.048794] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1659.060677] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.060912] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.061091] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.061252] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1659.062472] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f6f5fb-87a7-47e7-bc9e-57afeea0a7d1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.073039] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f54ebd-6cd2-4820-94b1-3883ad46a561 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.087227] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622fa985-0aa0-4fc9-85e2-cf1295136e1d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.093607] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff754be-f6d1-46d5-a2fb-db85a2e6293b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.122391] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180671MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1659.122530] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.122788] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.197798] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d actively managed on this 
compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198017] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198174] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198300] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198421] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198538] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198653] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198768] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198879] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.198991] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1659.210513] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.221302] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c0c535e6-e833-4b6a-870a-e1add9625765 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.231787] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 31f33c3a-e089-4ed5-b60c-3c2d5f55a529 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.243097] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.252766] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.262322] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1659.262552] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1659.262699] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1659.458224] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a3a427-ea69-473b-b8ba-1eef44c5ad99 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.465836] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fb672b-1007-4ab7-8432-9c27ae54d76e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.497234] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407bc5cd-3f6b-46a6-a460-a41a04231442 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.504746] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c401856b-011b-40a8-88c0-d7f19acb70bf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.517820] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1659.526985] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1659.542844] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1659.543045] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.420s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.701513] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1670.701513] env[68798]: ERROR oslo_vmware.rw_handles [ 1670.702174] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1670.705036] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1670.705199] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Copying Virtual Disk [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/364b0ca2-7c71-461e-aade-c2ce3277cbb3/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1670.705618] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d40edb83-d100-4c67-8e16-7e5759a100d6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.715301] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for the task: (returnval){ [ 1670.715301] env[68798]: value = "task-4217679" [ 1670.715301] env[68798]: _type = "Task" [ 1670.715301] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.724340] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Task: {'id': task-4217679, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.226573] env[68798]: DEBUG oslo_vmware.exceptions [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1671.226878] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.227483] env[68798]: ERROR nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1671.227483] env[68798]: Faults: ['InvalidArgument'] [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Traceback (most recent call last): [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] yield resources [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self.driver.spawn(context, instance, image_meta, [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self._fetch_image_if_missing(context, vi) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] image_cache(vi, tmp_image_ds_loc) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] vm_util.copy_virtual_disk( [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] session._wait_for_task(vmdk_copy_task) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return self.wait_for_task(task_ref) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return evt.wait() [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] result = hub.switch() [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return self.greenlet.switch() [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self.f(*self.args, **self.kw) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] raise exceptions.translate_fault(task_info.error) [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Faults: ['InvalidArgument'] [ 1671.227483] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] [ 1671.228587] env[68798]: INFO nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Terminating instance [ 1671.230190] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.230190] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1671.230190] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba686f68-239f-457b-9ccb-7b381c7f4a56 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.232231] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1671.232447] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1671.233304] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fd83b4-de7f-4a89-a061-d6ca6d14893d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.241451] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1671.241730] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82f6fc18-5a39-4d7c-833b-56a29326136e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.244390] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1671.244615] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1671.245716] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-937daeb5-81cc-4764-8958-e9e8dec62e48 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.252465] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for the task: (returnval){ [ 1671.252465] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]523a8a7f-6e95-3a02-f273-1f1e72b44a7c" [ 1671.252465] env[68798]: _type = "Task" [ 1671.252465] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.266019] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]523a8a7f-6e95-3a02-f273-1f1e72b44a7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.327681] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1671.327962] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1671.328174] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Deleting the datastore file [datastore1] 7bea1932-0490-409b-99b0-bd1f3f1a9d5d {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.328474] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3668941-cfcc-44e1-98df-6452923c2248 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.335875] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for the task: (returnval){ [ 1671.335875] env[68798]: value = "task-4217681" [ 1671.335875] env[68798]: _type = "Task" [ 1671.335875] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.344024] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Task: {'id': task-4217681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.763448] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1671.763833] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Creating directory with path [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1671.763941] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ecc9cfc-1625-49b6-83eb-4c2f32337cf4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.776723] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Created directory with path [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1671.776926] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Fetch image to [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1671.777120] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1671.777940] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5d17c0-3837-4608-8c32-3576970fbe28 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.785423] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6438ea6-740b-4705-997b-d5c4125e0587 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.795106] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0f0312-5909-47c8-8dd0-820fb4b7552d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.825769] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dc7c57-bbc6-476b-8556-c43472644399 
{{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.832434] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0a5927d9-3d5c-40a4-91cd-bcf009037639 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.844488] env[68798]: DEBUG oslo_vmware.api [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Task: {'id': task-4217681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07315} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.844738] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1671.844923] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1671.845170] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1671.845373] env[68798]: INFO nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Took 0.61 seconds to destroy the instance on the hypervisor. 
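The records above trace the vCenter task-polling pattern that recurs throughout this capture: a task such as CopyVirtualDisk_Task or DeleteDatastoreFile_Task is invoked, its progress is polled ("progress is 0%") until it completes, and an error state is translated into an exception that Nova then logs as a traceback (here the VimFaultException "A specified parameter was not correct: fileType"). The short Python sketch below is illustrative only and is not Nova's or oslo.vmware's actual code; wait_for_task, poll_fn and TaskFaultError are hypothetical names used to mirror that poll/translate-fault loop under those assumptions.

# Illustrative sketch only -- hypothetical names, not the oslo.vmware API.
# It mimics the loop visible in the log: poll a vCenter-style task until it
# reaches a terminal state, and raise when the task ends in an error state.
import time


class TaskFaultError(Exception):
    """Raised when the polled task ends in an error state (hypothetical)."""


def wait_for_task(poll_fn, interval=0.5, timeout=300):
    """Poll poll_fn() until the simulated task finishes.

    poll_fn is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 40}, {'state': 'success'} or
    {'state': 'error', 'fault': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            # Roughly the point where the library translates the server-side
            # fault and raises it to the caller, which the compute manager
            # then logs as the "Instance failed to spawn" traceback above.
            raise TaskFaultError(info.get('fault', 'unknown fault'))
        # Still running: report progress and retry, like the repeated
        # "Task: {...} progress is 0%" records in this capture.
        print("task progress is %s%%" % info.get('progress', 0))
        time.sleep(interval)
    raise TimeoutError('task did not complete within %ss' % timeout)


if __name__ == '__main__':
    # Simulated task that fails the same way CopyVirtualDisk_Task does in the
    # log: it reports an InvalidArgument-style fault on the second poll.
    states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'error',
         'fault': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except TaskFaultError as exc:
        print('task failed: %s' % exc)

Running the sketch prints one progress line and then "task failed: A specified parameter was not correct: fileType", which is the same terminal outcome the records above show before Nova terminates the instance and aborts its resource claim.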
[ 1671.847697] env[68798]: DEBUG nova.compute.claims [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1671.847862] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.848091] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.856967] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1672.042219] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1672.108665] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1672.108864] env[68798]: DEBUG oslo_vmware.rw_handles [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1672.182285] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e6d427-3ee6-471b-b3bc-bc8123c6edc6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.190894] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a5e066-f65d-4732-ac45-a6aa944c9361 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.222904] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e758dc-6326-43c2-9ce1-4859b88bc92e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.230487] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7694dd74-03a6-4b55-9a47-a099005b6218 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.243788] env[68798]: DEBUG nova.compute.provider_tree [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.252759] env[68798]: DEBUG nova.scheduler.client.report [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1672.268187] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.420s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.268806] env[68798]: ERROR nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.268806] env[68798]: Faults: ['InvalidArgument'] [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Traceback (most recent call last): [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 
7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self.driver.spawn(context, instance, image_meta, [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self._fetch_image_if_missing(context, vi) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] image_cache(vi, tmp_image_ds_loc) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] vm_util.copy_virtual_disk( [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] session._wait_for_task(vmdk_copy_task) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return self.wait_for_task(task_ref) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return evt.wait() [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] result = hub.switch() [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] return self.greenlet.switch() [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] self.f(*self.args, **self.kw) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] raise exceptions.translate_fault(task_info.error) [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Faults: ['InvalidArgument'] [ 1672.268806] env[68798]: ERROR nova.compute.manager [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] [ 1672.269871] env[68798]: DEBUG nova.compute.utils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1672.271417] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Build of instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d was re-scheduled: A specified parameter was not correct: fileType [ 1672.271417] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1672.271846] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1672.272058] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1672.272241] env[68798]: DEBUG nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1672.272406] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1672.711310] env[68798]: DEBUG nova.network.neutron [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.726343] env[68798]: INFO nova.compute.manager [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Took 0.45 seconds to deallocate network for instance. [ 1672.843584] env[68798]: INFO nova.scheduler.client.report [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Deleted allocations for instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d [ 1672.866830] env[68798]: DEBUG oslo_concurrency.lockutils [None req-cfe6cca0-e40d-4641-800f-a66caaa2c724 tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.123s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.868063] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.652s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.868297] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.868503] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.868668] env[68798]: 
DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.870849] env[68798]: INFO nova.compute.manager [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Terminating instance [ 1672.872458] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquiring lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.872888] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Acquired lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.873088] env[68798]: DEBUG nova.network.neutron [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1672.878048] env[68798]: DEBUG nova.compute.manager [None req-4c8e15e1-1d7d-4038-b10c-95a93fadfe8b tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: de6aeec1-7138-4fac-ac3b-aaced21ef9da] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1672.899935] env[68798]: DEBUG nova.network.neutron [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1672.904897] env[68798]: DEBUG nova.compute.manager [None req-4c8e15e1-1d7d-4038-b10c-95a93fadfe8b tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] [instance: de6aeec1-7138-4fac-ac3b-aaced21ef9da] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1672.928462] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4c8e15e1-1d7d-4038-b10c-95a93fadfe8b tempest-AttachVolumeNegativeTest-740411461 tempest-AttachVolumeNegativeTest-740411461-project-member] Lock "de6aeec1-7138-4fac-ac3b-aaced21ef9da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.463s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.939191] env[68798]: DEBUG nova.compute.manager [None req-e619623d-0ddf-4fa5-b0f4-5ac6117be62e tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 3ed17cef-5946-475d-994c-568aa7f83ea4] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1672.966956] env[68798]: DEBUG nova.compute.manager [None req-e619623d-0ddf-4fa5-b0f4-5ac6117be62e tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 3ed17cef-5946-475d-994c-568aa7f83ea4] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1673.003823] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e619623d-0ddf-4fa5-b0f4-5ac6117be62e tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "3ed17cef-5946-475d-994c-568aa7f83ea4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.059s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.017725] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1673.069404] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.069857] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.072021] env[68798]: INFO nova.compute.claims [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1673.088267] env[68798]: DEBUG nova.network.neutron [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.097305] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Releasing lock "refresh_cache-7bea1932-0490-409b-99b0-bd1f3f1a9d5d" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.097743] env[68798]: DEBUG nova.compute.manager [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1673.097937] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1673.099171] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e0d6bb6-8633-47c9-a9b3-c6de24490391 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.109538] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d6bbf2-1f3e-4235-8922-85a7fea351bc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.143392] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7bea1932-0490-409b-99b0-bd1f3f1a9d5d could not be found. [ 1673.143392] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1673.143392] env[68798]: INFO nova.compute.manager [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1673.143638] env[68798]: DEBUG oslo.service.loopingcall [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.143859] env[68798]: DEBUG nova.compute.manager [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1673.143968] env[68798]: DEBUG nova.network.neutron [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1673.169368] env[68798]: DEBUG nova.network.neutron [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1673.178097] env[68798]: DEBUG nova.network.neutron [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.189771] env[68798]: INFO nova.compute.manager [-] [instance: 7bea1932-0490-409b-99b0-bd1f3f1a9d5d] Took 0.05 seconds to deallocate network for instance. 
[ 1673.303856] env[68798]: DEBUG oslo_concurrency.lockutils [None req-edb710c6-36ac-4f38-8eaf-b37a9dc211bc tempest-ServersTestJSON-231587440 tempest-ServersTestJSON-231587440-project-member] Lock "7bea1932-0490-409b-99b0-bd1f3f1a9d5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.435s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.359214] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c346071-0469-4192-bb6c-f63b02522af5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.367394] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b341215e-7560-4322-8617-cc52e5ee5ce2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.399328] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3e3cf1-f1bd-4607-81eb-3278a6e68381 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.407359] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c11cf3-6603-4518-8679-2b288d57385d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.420962] env[68798]: DEBUG nova.compute.provider_tree [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.429962] env[68798]: DEBUG nova.scheduler.client.report [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1673.447600] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.448122] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1673.487357] env[68798]: DEBUG nova.compute.utils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1673.488999] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1673.488999] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1673.500484] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1673.554926] env[68798]: DEBUG nova.policy [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '614a181e150a4661b74b7cc8420e3afd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0d8374636c34757bbb4cbb1783152b3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1673.575243] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1673.602021] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1673.602021] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1673.602021] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.602021] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1673.602438] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.602438] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1673.602730] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1673.602895] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1673.603095] env[68798]: DEBUG 
nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1673.603307] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1673.603454] env[68798]: DEBUG nova.virt.hardware [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1673.604469] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92b5202-5c12-462b-bab9-bee9d641abf5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.613393] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17746c6-eb4b-4ba5-8f92-ca4667a308f1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.906604] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Successfully created port: d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1674.773805] env[68798]: DEBUG nova.compute.manager [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Received event network-vif-plugged-d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1674.773805] env[68798]: DEBUG oslo_concurrency.lockutils [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] Acquiring lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.773805] env[68798]: DEBUG oslo_concurrency.lockutils [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.774021] env[68798]: DEBUG oslo_concurrency.lockutils [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.774187] env[68798]: DEBUG 
nova.compute.manager [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] No waiting events found dispatching network-vif-plugged-d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1674.774331] env[68798]: WARNING nova.compute.manager [req-67c97a01-5373-4249-8fb7-595414b21b09 req-cde73e93-d257-4811-86d7-546c6448f6e8 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Received unexpected event network-vif-plugged-d025a3ff-8f97-4550-99e0-0e000f944053 for instance with vm_state building and task_state spawning. [ 1674.863248] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Successfully updated port: d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1674.874228] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.874457] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquired lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.874621] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1674.920969] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1675.128223] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Updating instance_info_cache with network_info: [{"id": "d025a3ff-8f97-4550-99e0-0e000f944053", "address": "fa:16:3e:b9:82:13", "network": {"id": "55809baf-6eb5-406c-b483-ee4a3f89ebf4", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1954874050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d8374636c34757bbb4cbb1783152b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd025a3ff-8f", "ovs_interfaceid": "d025a3ff-8f97-4550-99e0-0e000f944053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.142135] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Releasing lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.142434] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance network_info: |[{"id": "d025a3ff-8f97-4550-99e0-0e000f944053", "address": "fa:16:3e:b9:82:13", "network": {"id": "55809baf-6eb5-406c-b483-ee4a3f89ebf4", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1954874050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d8374636c34757bbb4cbb1783152b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd025a3ff-8f", "ovs_interfaceid": "d025a3ff-8f97-4550-99e0-0e000f944053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1675.142847] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:82:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd025a3ff-8f97-4550-99e0-0e000f944053', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1675.150291] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Creating folder: Project (f0d8374636c34757bbb4cbb1783152b3). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1675.150823] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb56b82b-94d0-416d-8741-8a5cb0cfffb6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.161569] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Created folder: Project (f0d8374636c34757bbb4cbb1783152b3) in parent group-v834492. [ 1675.161792] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Creating folder: Instances. Parent ref: group-v834587. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1675.161991] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c98750d3-69c9-47c7-bc28-3f486686062a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.171619] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Created folder: Instances in parent group-v834587. [ 1675.171869] env[68798]: DEBUG oslo.service.loopingcall [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.172073] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1675.172289] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36acca36-5ba0-43c3-862d-0b8b6d560d1c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.192734] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1675.192734] env[68798]: value = "task-4217684" [ 1675.192734] env[68798]: _type = "Task" [ 1675.192734] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.200677] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217684, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.702550] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217684, 'name': CreateVM_Task, 'duration_secs': 0.338178} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.702740] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1675.703438] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.703601] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.703922] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1675.704201] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24436ce9-7a87-433c-be86-59f2012a17d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.709133] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for the task: (returnval){ [ 1675.709133] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52124028-9169-9369-2e2e-0aa52e44aeee" [ 1675.709133] env[68798]: _type = "Task" [ 1675.709133] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.718096] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52124028-9169-9369-2e2e-0aa52e44aeee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.220219] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.220589] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1676.220727] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.799724] env[68798]: DEBUG nova.compute.manager [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Received event network-changed-d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1676.799940] env[68798]: DEBUG nova.compute.manager [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Refreshing instance network info cache due to event network-changed-d025a3ff-8f97-4550-99e0-0e000f944053. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1676.800171] env[68798]: DEBUG oslo_concurrency.lockutils [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] Acquiring lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.800319] env[68798]: DEBUG oslo_concurrency.lockutils [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] Acquired lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.800523] env[68798]: DEBUG nova.network.neutron [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Refreshing network info cache for port d025a3ff-8f97-4550-99e0-0e000f944053 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1677.135074] env[68798]: DEBUG nova.network.neutron [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Updated VIF entry in instance network info cache for port d025a3ff-8f97-4550-99e0-0e000f944053. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1677.135457] env[68798]: DEBUG nova.network.neutron [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Updating instance_info_cache with network_info: [{"id": "d025a3ff-8f97-4550-99e0-0e000f944053", "address": "fa:16:3e:b9:82:13", "network": {"id": "55809baf-6eb5-406c-b483-ee4a3f89ebf4", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1954874050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f0d8374636c34757bbb4cbb1783152b3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd025a3ff-8f", "ovs_interfaceid": "d025a3ff-8f97-4550-99e0-0e000f944053", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.146195] env[68798]: DEBUG oslo_concurrency.lockutils [req-276f911e-6303-42a6-8ad8-7b6ff12be7ea req-43c604b8-9c43-450f-9a4a-441e69c6ad70 service nova] Releasing lock "refresh_cache-ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.144695] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] 
Acquiring lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.544504] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1709.544904] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.048758] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.049253] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.049451] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1711.049374] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1711.049690] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1711.049690] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1711.074038] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.074317] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.074505] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.074647] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.074772] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.074898] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.075029] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.075154] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.075274] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.075394] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1711.075514] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1711.076081] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.071464] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.048559] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.049377] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.062058] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.062315] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.062464] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.062622] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1719.064026] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193da6bb-966f-491d-aa61-290eec673880 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.072820] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe22e1e6-dc12-4d1c-a525-fcde799bdf24 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.086634] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c2ca49-a392-4507-bd9f-fb2458e0f462 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.092996] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f0b209-13a5-4acc-b17f-4794d7a533cf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.121050] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180717MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1719.121204] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.121394] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.196681] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance da5d9023-f6c1-44f8-9465-36aa2b109924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.196855] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.196988] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197133] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197254] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197371] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197488] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197601] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197715] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.197827] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1719.209981] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 31f33c3a-e089-4ed5-b60c-3c2d5f55a529 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.222030] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.233028] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.243447] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1719.243682] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1719.243856] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1719.424537] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8015e12c-66d7-4637-98d3-936bcd5988df {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.432672] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221d7189-475f-4c56-9a4e-823460ba6391 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.466280] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1225a814-6923-47f3-91cd-c57ded458b81 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.474358] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b16712-5aa0-4af1-a6be-6f84a6eb5794 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.488635] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.497822] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1719.513223] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1719.513443] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.392s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.721835] env[68798]: WARNING oslo_vmware.rw_handles [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.721835] env[68798]: ERROR oslo_vmware.rw_handles [ 1720.721835] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1720.722931] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1720.723320] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Copying Virtual Disk [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/85b14af4-10ac-4b45-a2d7-939033289b55/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1720.723684] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-1bebc819-a233-42bb-8311-854f347d6834 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.733661] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for the task: (returnval){ [ 1720.733661] env[68798]: value = "task-4217685" [ 1720.733661] env[68798]: _type = "Task" [ 1720.733661] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.742801] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Task: {'id': task-4217685, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.244111] env[68798]: DEBUG oslo_vmware.exceptions [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1721.244429] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.245036] env[68798]: ERROR nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.245036] env[68798]: Faults: ['InvalidArgument'] [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Traceback (most recent call last): [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] yield resources [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self.driver.spawn(context, instance, image_meta, [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1721.245036] env[68798]: ERROR 
nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self._fetch_image_if_missing(context, vi) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] image_cache(vi, tmp_image_ds_loc) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] vm_util.copy_virtual_disk( [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] session._wait_for_task(vmdk_copy_task) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return self.wait_for_task(task_ref) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return evt.wait() [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] result = hub.switch() [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return self.greenlet.switch() [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self.f(*self.args, **self.kw) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] raise exceptions.translate_fault(task_info.error) [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Faults: ['InvalidArgument'] [ 1721.245036] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] [ 1721.245987] 
env[68798]: INFO nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Terminating instance [ 1721.248092] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.248384] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.249247] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1721.249491] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1721.249826] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e8169e2-7b2a-4c36-80a4-4c8459d62492 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.252298] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88df65ee-5245-4d09-baba-6f9e6913c024 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.261580] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1721.262119] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b00437a3-9002-4118-9675-fa476573e45e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.264568] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.264664] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1721.265639] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7896dc5f-7cb5-49ea-b214-113efef0fd40 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.271450] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1721.271450] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52dab341-f4cb-aff4-96b5-806278e946f6" [ 1721.271450] env[68798]: _type = "Task" [ 1721.271450] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.279364] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52dab341-f4cb-aff4-96b5-806278e946f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.341025] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1721.341025] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1721.341025] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Deleting the datastore file [datastore1] da5d9023-f6c1-44f8-9465-36aa2b109924 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1721.341025] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a77b6cdd-7d29-423d-a764-74f9555a3cc8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.347043] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for the task: (returnval){ [ 1721.347043] env[68798]: value = "task-4217687" [ 1721.347043] env[68798]: _type = "Task" [ 1721.347043] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.355289] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Task: {'id': task-4217687, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.783599] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1721.783980] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.784119] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38dd6c71-58e8-40f8-9ea5-32cb25a6bbdd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.796468] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.796677] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Fetch image to [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1721.796852] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1721.797674] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47af85ef-0c82-47c3-bb76-b961a2e7d53c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.804937] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d7e520-525f-4396-bb6d-ace3e1eb4641 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.819203] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-823679bd-bbc2-456a-9bbc-472e5e7982fd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.857416] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a47c55-36f1-465f-b7c9-f39bcc1528af {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.866783] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e396121d-cdb1-4bad-86c5-9dac9ebf5b5d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.868548] env[68798]: DEBUG oslo_vmware.api [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Task: {'id': task-4217687, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07952} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.868780] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1721.868960] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1721.869141] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1721.869339] env[68798]: INFO nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Took 0.62 seconds to destroy the instance on the hypervisor. 
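The DeleteDatastoreFile_Task sequence above shows the task lifecycle that repeats throughout this trace: the SOAP call returns a task handle (here task-4217687), wait_for_task polls it (_poll_task reports "progress is 0%"), and the final poll carries 'duration_secs' and "completed successfully". As a reading aid for traces like this one, here is a minimal, hypothetical Python sketch (standard library only; the regex, function name, and log file name are assumptions, not part of Nova or oslo.vmware) that groups those poll lines per task id:

import re
from collections import defaultdict

# Matches _poll_task lines such as:
#   Task: {'id': task-4217687, 'name': DeleteDatastoreFile_Task} progress is 0%.
#   Task: {'id': task-4217687, 'name': DeleteDatastoreFile_Task,
#          'duration_secs': 0.07952} completed successfully.
POLL_RE = re.compile(
    r"Task: \{'id': (?P<task>[^,]+), 'name': (?P<name>\w+)"
    r"(?:, 'duration_secs': (?P<secs>[\d.]+))?\}"
    r" (?P<state>progress is \d+%|completed successfully)"
)

def summarize_tasks(log_lines):
    """Group poll events per task id and report the last state seen."""
    tasks = defaultdict(list)
    for line in log_lines:
        m = POLL_RE.search(line)
        if m:
            tasks[m.group("task")].append(m.groupdict())
    for task_id, events in tasks.items():
        last = events[-1]
        print(f"{task_id} ({last['name']}): {len(events)} poll(s), "
              f"last state: {last['state']}, duration_secs={last['secs']}")

if __name__ == "__main__":
    with open("nova-compute.log") as fh:  # hypothetical file name
        summarize_tasks(fh)

Run over this section it should report something like "task-4217687 (DeleteDatastoreFile_Task): 2 poll(s), last state: completed successfully, duration_secs=0.07952", which matches the two poll lines above.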
[ 1721.871383] env[68798]: DEBUG nova.compute.claims [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1721.871546] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.871745] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.892662] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1721.969858] env[68798]: DEBUG oslo_vmware.rw_handles [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1722.033010] env[68798]: DEBUG oslo_vmware.rw_handles [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1722.033298] env[68798]: DEBUG oslo_vmware.rw_handles [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1722.159672] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b84554-9503-484d-a9e0-eca4651c919a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.167971] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8c194b-7745-48e1-8e3f-ca86165be369 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.199141] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a0f489-ac9c-4f91-8d14-581ede72eca2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.206903] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b622cc67-0d42-4caf-b42a-01deb0732996 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.221231] env[68798]: DEBUG nova.compute.provider_tree [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.229917] env[68798]: DEBUG nova.scheduler.client.report [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.248842] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.377s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.249374] env[68798]: ERROR nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.249374] env[68798]: Faults: ['InvalidArgument'] [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Traceback (most recent call last): [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1722.249374] env[68798]: ERROR 
nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self.driver.spawn(context, instance, image_meta, [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self._fetch_image_if_missing(context, vi) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] image_cache(vi, tmp_image_ds_loc) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] vm_util.copy_virtual_disk( [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] session._wait_for_task(vmdk_copy_task) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return self.wait_for_task(task_ref) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return evt.wait() [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] result = hub.switch() [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] return self.greenlet.switch() [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] self.f(*self.args, **self.kw) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] raise exceptions.translate_fault(task_info.error) [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Faults: ['InvalidArgument'] [ 1722.249374] env[68798]: ERROR nova.compute.manager [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] [ 1722.250230] env[68798]: DEBUG nova.compute.utils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1722.251626] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Build of instance da5d9023-f6c1-44f8-9465-36aa2b109924 was re-scheduled: A specified parameter was not correct: fileType [ 1722.251626] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1722.251983] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1722.252174] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1722.252349] env[68798]: DEBUG nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1722.252512] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.967739] env[68798]: DEBUG nova.network.neutron [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.979772] env[68798]: INFO nova.compute.manager [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Took 0.73 seconds to deallocate network for instance. [ 1723.102280] env[68798]: INFO nova.scheduler.client.report [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Deleted allocations for instance da5d9023-f6c1-44f8-9465-36aa2b109924 [ 1723.128637] env[68798]: DEBUG oslo_concurrency.lockutils [None req-14d54362-b527-43b6-a0d7-7129887230e4 tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 618.914s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.130036] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.735s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.130326] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Acquiring lock "da5d9023-f6c1-44f8-9465-36aa2b109924-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.130542] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.130710] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.132721] env[68798]: INFO nova.compute.manager [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Terminating instance [ 1723.136248] env[68798]: DEBUG nova.compute.manager [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1723.136463] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1723.136714] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caa4100f-6b77-4999-b887-8f0e51b06390 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.140606] env[68798]: DEBUG nova.compute.manager [None req-af258cdd-553a-4a55-be18-bdd5b4780225 tempest-ServerPasswordTestJSON-123226599 tempest-ServerPasswordTestJSON-123226599-project-member] [instance: c0c535e6-e833-4b6a-870a-e1add9625765] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1723.147071] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6f7de8-204b-4703-a571-58917e5ca90f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.165604] env[68798]: DEBUG nova.compute.manager [None req-af258cdd-553a-4a55-be18-bdd5b4780225 tempest-ServerPasswordTestJSON-123226599 tempest-ServerPasswordTestJSON-123226599-project-member] [instance: c0c535e6-e833-4b6a-870a-e1add9625765] Instance disappeared before build. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1723.178020] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance da5d9023-f6c1-44f8-9465-36aa2b109924 could not be found. 
[ 1723.178365] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1723.178447] env[68798]: INFO nova.compute.manager [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1723.178709] env[68798]: DEBUG oslo.service.loopingcall [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.178946] env[68798]: DEBUG nova.compute.manager [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1723.179097] env[68798]: DEBUG nova.network.neutron [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1723.194400] env[68798]: DEBUG oslo_concurrency.lockutils [None req-af258cdd-553a-4a55-be18-bdd5b4780225 tempest-ServerPasswordTestJSON-123226599 tempest-ServerPasswordTestJSON-123226599-project-member] Lock "c0c535e6-e833-4b6a-870a-e1add9625765" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.845s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.204246] env[68798]: DEBUG nova.compute.manager [None req-5b946144-b26d-41ff-b248-adf617e39ff4 tempest-ServerActionsV293TestJSON-878530011 tempest-ServerActionsV293TestJSON-878530011-project-member] [instance: 31f33c3a-e089-4ed5-b60c-3c2d5f55a529] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1723.207626] env[68798]: DEBUG nova.network.neutron [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.217304] env[68798]: INFO nova.compute.manager [-] [instance: da5d9023-f6c1-44f8-9465-36aa2b109924] Took 0.04 seconds to deallocate network for instance. [ 1723.241812] env[68798]: DEBUG nova.compute.manager [None req-5b946144-b26d-41ff-b248-adf617e39ff4 tempest-ServerActionsV293TestJSON-878530011 tempest-ServerActionsV293TestJSON-878530011-project-member] [instance: 31f33c3a-e089-4ed5-b60c-3c2d5f55a529] Instance disappeared before build. 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1723.264944] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5b946144-b26d-41ff-b248-adf617e39ff4 tempest-ServerActionsV293TestJSON-878530011 tempest-ServerActionsV293TestJSON-878530011-project-member] Lock "31f33c3a-e089-4ed5-b60c-3c2d5f55a529" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.115s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.274906] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1723.314274] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1c7428ac-0d95-4dd9-b70a-df297e33044e tempest-ImagesNegativeTestJSON-280462600 tempest-ImagesNegativeTestJSON-280462600-project-member] Lock "da5d9023-f6c1-44f8-9465-36aa2b109924" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.328383] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.328639] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.330153] env[68798]: INFO nova.compute.claims [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.530808] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398554be-d38b-46ce-b3c6-90ab924fba1e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.538991] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8622e262-2b42-451d-9d85-b5b505b9b5fd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.569668] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17516852-a94f-476c-9ec6-9719b03935c8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.577300] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86068bc4-e1c5-4368-b0fc-94074015fcc5 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.590491] env[68798]: DEBUG nova.compute.provider_tree [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.599608] env[68798]: DEBUG nova.scheduler.client.report [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1723.613387] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.613613] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1723.649428] env[68798]: DEBUG nova.compute.utils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.650938] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1723.651126] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1723.663696] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Start building block device mappings for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1723.715192] env[68798]: DEBUG nova.policy [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4591576f20d142a0a68342f8a1c9bfc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5b33dbd010340649a5c38226ec87f36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1723.731013] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Start spawning the instance on the hypervisor. {{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1723.759456] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.759706] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.759862] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.760065] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.760213] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.760359] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 
tempest-ImagesTestJSON-406590470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.760567] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.760726] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.760946] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.761104] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.761225] env[68798]: DEBUG nova.virt.hardware [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.762126] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7621237b-db8f-4218-936c-045c9655d139 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.771057] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e31448e-1e7f-4693-ba1f-31656083bbf0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.130835] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Successfully created port: f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.814935] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Successfully updated port: f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.828934] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.829096] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.830516] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1724.881257] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1725.018316] env[68798]: DEBUG nova.compute.manager [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Received event network-vif-plugged-f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1725.018692] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Acquiring lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.019028] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.019310] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.019580] env[68798]: DEBUG nova.compute.manager [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] No waiting events found dispatching network-vif-plugged-f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1725.019849] env[68798]: WARNING nova.compute.manager [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Received unexpected event network-vif-plugged-f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 for instance with vm_state building and task_state spawning. 
[ 1725.020139] env[68798]: DEBUG nova.compute.manager [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Received event network-changed-f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1725.020372] env[68798]: DEBUG nova.compute.manager [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Refreshing instance network info cache due to event network-changed-f4e24c7b-b19b-4bd9-8bbc-4fc32787d747. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1725.020630] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Acquiring lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.065610] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Updating instance_info_cache with network_info: [{"id": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "address": "fa:16:3e:7c:da:1a", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e24c7b-b1", "ovs_interfaceid": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.079065] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.079472] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance network_info: |[{"id": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "address": "fa:16:3e:7c:da:1a", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", 
"type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e24c7b-b1", "ovs_interfaceid": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1725.079857] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Acquired lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.080109] env[68798]: DEBUG nova.network.neutron [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Refreshing network info cache for port f4e24c7b-b19b-4bd9-8bbc-4fc32787d747 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1725.081783] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:da:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e1049e8-c06b-4c93-a9e1-2cbb530f3f95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4e24c7b-b19b-4bd9-8bbc-4fc32787d747', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1725.095451] env[68798]: DEBUG oslo.service.loopingcall [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.100231] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1725.101251] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c473a9b5-e8dd-42e8-b438-473561ea4d36 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.127554] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1725.127554] env[68798]: value = "task-4217688" [ 1725.127554] env[68798]: _type = "Task" [ 1725.127554] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.136787] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217688, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.494467] env[68798]: DEBUG nova.network.neutron [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Updated VIF entry in instance network info cache for port f4e24c7b-b19b-4bd9-8bbc-4fc32787d747. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1725.494921] env[68798]: DEBUG nova.network.neutron [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Updating instance_info_cache with network_info: [{"id": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "address": "fa:16:3e:7c:da:1a", "network": {"id": "052d5114-ea0a-492e-aada-eb5e2258f0e6", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1119146414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5b33dbd010340649a5c38226ec87f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e1049e8-c06b-4c93-a9e1-2cbb530f3f95", "external-id": "nsx-vlan-transportzone-966", "segmentation_id": 966, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4e24c7b-b1", "ovs_interfaceid": "f4e24c7b-b19b-4bd9-8bbc-4fc32787d747", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.505310] env[68798]: DEBUG oslo_concurrency.lockutils [req-dba4acc6-97f9-4a0a-8c73-750648749fa8 req-518fa120-c593-4b3d-8cd1-0e1c46d78227 service nova] Releasing lock "refresh_cache-57a34323-ebdd-4495-ab62-f7b82ab804d9" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.638605] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217688, 'name': CreateVM_Task, 'duration_secs': 0.310438} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.638801] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1725.639507] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.639702] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.640036] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1725.640291] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e3e56b2-1208-48be-8e13-5ce9df052cf5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.644969] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1725.644969] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]528d10ab-5bf3-e2f9-9d09-77c6c38bae6e" [ 1725.644969] env[68798]: _type = "Task" [ 1725.644969] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.653886] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]528d10ab-5bf3-e2f9-9d09-77c6c38bae6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.155316] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.155711] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1726.155813] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.971489] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.515423] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1770.737035] env[68798]: WARNING oslo_vmware.rw_handles [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1770.737035] env[68798]: ERROR oslo_vmware.rw_handles [ 1770.737035] env[68798]: 
DEBUG nova.virt.vmwareapi.images [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1770.739183] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1770.739490] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Copying Virtual Disk [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/09726e51-34a2-44cd-8733-7be95f74324c/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1770.739780] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfe0f20b-15ba-4e9d-b8e9-efb9f321ae96 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.748396] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1770.748396] env[68798]: value = "task-4217689" [ 1770.748396] env[68798]: _type = "Task" [ 1770.748396] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.758037] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217689, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.048995] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.049264] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1771.049394] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1771.073491] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.073713] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.073865] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074041] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074212] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074356] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074509] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074678] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074813] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.074961] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1771.075143] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1771.075750] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.075924] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.076133] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.076333] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1771.076477] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1771.259906] env[68798]: DEBUG oslo_vmware.exceptions [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1771.260254] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.260848] env[68798]: ERROR nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1771.260848] env[68798]: Faults: ['InvalidArgument'] [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Traceback (most recent call last): [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] yield resources [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self.driver.spawn(context, instance, image_meta, [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: 
e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self._fetch_image_if_missing(context, vi) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] image_cache(vi, tmp_image_ds_loc) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] vm_util.copy_virtual_disk( [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] session._wait_for_task(vmdk_copy_task) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return self.wait_for_task(task_ref) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return evt.wait() [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] result = hub.switch() [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return self.greenlet.switch() [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self.f(*self.args, **self.kw) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] raise 
exceptions.translate_fault(task_info.error) [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Faults: ['InvalidArgument'] [ 1771.260848] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] [ 1771.261948] env[68798]: INFO nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Terminating instance [ 1771.262957] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.263194] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.263451] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27d30431-99c7-4045-a8c5-2060535d0592 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.266368] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1771.266573] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1771.267405] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efcbf32-4104-42e5-a81c-750933d4ec72 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.275767] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1771.276008] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-297a13a3-5264-460b-bc08-3905846a2dd0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.278911] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.279215] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1771.280416] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b152fc0d-d692-438d-a1e9-2c99f3adca63 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.287491] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 1771.287491] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]521336a9-1ef6-65cd-4846-f78a05326d9e" [ 1771.287491] env[68798]: _type = "Task" [ 1771.287491] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.295825] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]521336a9-1ef6-65cd-4846-f78a05326d9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.360531] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1771.360814] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1771.361065] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleting the datastore file [datastore1] e75b2848-5dfa-4ffa-b37a-6338c8221dd3 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.361396] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b599636d-1b73-4d7c-a3dd-beb01044acba {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.368886] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 1771.368886] env[68798]: value = "task-4217691" [ 1771.368886] env[68798]: _type = "Task" [ 1771.368886] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.377825] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217691, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.798093] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1771.798535] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.798639] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bcdd6643-f895-4628-aad4-f8448623496d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.811494] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.811710] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Fetch image to [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1771.811896] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1771.812678] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a594859-d371-4b2d-9952-f894c790cd75 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.820017] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0341b622-ce40-4d9f-b5d7-e52cb85669d7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.829591] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ef1a92-050f-473f-9b55-f29be66ad7a2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.861084] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7fdbe2-9c7a-40ff-b9d5-f98a694888d6 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.867084] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4e7f4fb2-398f-48cc-9d47-9e9c8518dd03 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.876802] env[68798]: DEBUG oslo_vmware.api [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217691, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077621} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.877057] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1771.877248] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1771.877422] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1771.877594] env[68798]: INFO nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1771.879746] env[68798]: DEBUG nova.compute.claims [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1771.879914] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.880151] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.889228] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1771.947377] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1772.007298] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1772.007493] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1772.132193] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0fb6e8-027f-46ef-9387-722e7ef85303 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.139675] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d0653f-45fb-4481-b03e-db13fc3e8991 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.170135] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db054756-7cd6-4c01-ba19-d81743b27dbb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.178210] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0728dd-8a8f-4587-9db8-fd62440a1e8f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.191978] env[68798]: DEBUG nova.compute.provider_tree [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.200903] env[68798]: DEBUG nova.scheduler.client.report [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.217626] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.218242] env[68798]: ERROR nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.218242] env[68798]: Faults: ['InvalidArgument'] [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Traceback (most recent call last): [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] 
self.driver.spawn(context, instance, image_meta, [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self._fetch_image_if_missing(context, vi) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] image_cache(vi, tmp_image_ds_loc) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] vm_util.copy_virtual_disk( [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] session._wait_for_task(vmdk_copy_task) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return self.wait_for_task(task_ref) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return evt.wait() [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] result = hub.switch() [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] return self.greenlet.switch() [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] self.f(*self.args, **self.kw) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] raise exceptions.translate_fault(task_info.error) [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Faults: ['InvalidArgument'] [ 1772.218242] env[68798]: ERROR nova.compute.manager [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] [ 1772.219736] env[68798]: DEBUG nova.compute.utils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1772.220610] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Build of instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 was re-scheduled: A specified parameter was not correct: fileType [ 1772.220610] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1772.220997] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1772.221190] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1772.221366] env[68798]: DEBUG nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1772.221528] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1772.646497] env[68798]: DEBUG nova.network.neutron [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.660911] env[68798]: INFO nova.compute.manager [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Took 0.44 seconds to deallocate network for instance. 
[ 1772.761976] env[68798]: INFO nova.scheduler.client.report [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted allocations for instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 [ 1772.786087] env[68798]: DEBUG oslo_concurrency.lockutils [None req-72c9e2f1-2243-4c3b-b258-c42040cc4023 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.473s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.787396] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.841s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.787593] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.787831] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.788009] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.790256] env[68798]: INFO nova.compute.manager [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Terminating instance [ 1772.792293] env[68798]: DEBUG nova.compute.manager [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1772.792293] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1772.792976] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-26326bb4-b1af-4155-8cb8-f3137732bc57 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.804431] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5753aa-f056-4c8a-8752-59c6be64e8d4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.815718] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1772.838917] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e75b2848-5dfa-4ffa-b37a-6338c8221dd3 could not be found. [ 1772.839170] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1772.839367] env[68798]: INFO nova.compute.manager [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1772.839668] env[68798]: DEBUG oslo.service.loopingcall [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.840166] env[68798]: DEBUG nova.compute.manager [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1772.840166] env[68798]: DEBUG nova.network.neutron [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1772.865228] env[68798]: DEBUG nova.network.neutron [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.868013] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.868286] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.869987] env[68798]: INFO nova.compute.claims [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1772.874516] env[68798]: INFO nova.compute.manager [-] [instance: e75b2848-5dfa-4ffa-b37a-6338c8221dd3] Took 0.03 seconds to deallocate network for instance. 
[ 1772.994096] env[68798]: DEBUG oslo_concurrency.lockutils [None req-5e0a0380-f4d7-4d3e-a209-35cc0a8a3eba tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "e75b2848-5dfa-4ffa-b37a-6338c8221dd3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.050492] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.050847] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.051009] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 1773.062641] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1773.073995] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e3b08a-e9c6-4fdb-9b03-73ac290472bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.082215] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e75c5a3-2626-4fb0-b840-f29e06e957db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.115090] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbab19d-8151-41b4-8c5e-90dbdb3fb043 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.123149] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1814d6b6-915b-4768-aa00-03a471630662 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.136950] env[68798]: DEBUG nova.compute.provider_tree [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.145481] env[68798]: DEBUG nova.scheduler.client.report [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1773.159660] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.160243] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1773.195836] env[68798]: DEBUG nova.compute.utils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.197430] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1773.197601] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1773.207050] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1773.260233] env[68798]: DEBUG nova.policy [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e7ee34608848b39cc2a7114e7d682d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efeea8a59294c7ca8b499dda555a3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1773.274015] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1773.301966] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1773.302233] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1773.302394] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1773.302700] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1773.302798] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1773.302867] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1773.303098] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1773.303265] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1773.303433] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 
tempest-ServersTestJSON-1349294209-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1773.303598] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1773.303821] env[68798]: DEBUG nova.virt.hardware [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1773.304744] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653c66fe-382c-4c67-baa7-d1dfb98fa155 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.313490] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71738e5a-fa38-43e8-953d-2859c63c0b79 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.705367] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Successfully created port: 111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.361187] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Successfully updated port: 111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1774.373730] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.373936] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.374048] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1774.419678] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1774.644400] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Updating instance_info_cache with network_info: [{"id": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "address": "fa:16:3e:c6:a5:2c", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap111e5830-af", "ovs_interfaceid": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.656874] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.657289] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance network_info: |[{"id": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "address": "fa:16:3e:c6:a5:2c", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap111e5830-af", "ovs_interfaceid": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1774.657612] env[68798]: 
DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:a5:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c6324fd-a761-417c-bc85-b6278daecfc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '111e5830-af9a-4d7a-ada8-b7cc32b6f37e', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1774.665223] env[68798]: DEBUG oslo.service.loopingcall [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.665731] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1774.665967] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34be0ef3-f1f5-4eb4-a3fd-ccd8be843dd9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.683975] env[68798]: DEBUG nova.compute.manager [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Received event network-vif-plugged-111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1774.684226] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Acquiring lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.684430] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.684588] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.684754] env[68798]: DEBUG nova.compute.manager [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] No waiting events found dispatching network-vif-plugged-111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1774.684923] env[68798]: WARNING nova.compute.manager [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service 
nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Received unexpected event network-vif-plugged-111e5830-af9a-4d7a-ada8-b7cc32b6f37e for instance with vm_state building and task_state spawning. [ 1774.685098] env[68798]: DEBUG nova.compute.manager [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Received event network-changed-111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1774.685293] env[68798]: DEBUG nova.compute.manager [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Refreshing instance network info cache due to event network-changed-111e5830-af9a-4d7a-ada8-b7cc32b6f37e. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1774.685494] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Acquiring lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.685621] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Acquired lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.685776] env[68798]: DEBUG nova.network.neutron [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Refreshing network info cache for port 111e5830-af9a-4d7a-ada8-b7cc32b6f37e {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1774.693036] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.693036] env[68798]: value = "task-4217692" [ 1774.693036] env[68798]: _type = "Task" [ 1774.693036] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.704402] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217692, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.202969] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217692, 'name': CreateVM_Task, 'duration_secs': 0.31869} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.203139] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1775.203734] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.203929] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.204234] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.204484] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca6cb299-ebd8-46f3-b5bd-6e16bdb121a7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.209739] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 1775.209739] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52e40b8e-407d-9812-5a3d-91f10a0d8685" [ 1775.209739] env[68798]: _type = "Task" [ 1775.209739] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.220336] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52e40b8e-407d-9812-5a3d-91f10a0d8685, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.235310] env[68798]: DEBUG nova.network.neutron [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Updated VIF entry in instance network info cache for port 111e5830-af9a-4d7a-ada8-b7cc32b6f37e. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1775.235924] env[68798]: DEBUG nova.network.neutron [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Updating instance_info_cache with network_info: [{"id": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "address": "fa:16:3e:c6:a5:2c", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap111e5830-af", "ovs_interfaceid": "111e5830-af9a-4d7a-ada8-b7cc32b6f37e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.246044] env[68798]: DEBUG oslo_concurrency.lockutils [req-5a7d5f56-647f-4271-8a87-2a080e6dd587 req-e32e3873-da42-4a42-949f-5918f31057cd service nova] Releasing lock "refresh_cache-6f0e769a-33db-48c6-9a88-cceb310cb819" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.722303] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.722667] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.722784] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.048360] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.048617] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] 
Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.048771] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 1776.058627] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.061015] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.048562] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.060784] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.061137] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.061380] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.061609] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1780.062920] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338b1ff3-511c-4e46-8501-679f3af75ccf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.072086] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c525c963-c9f4-4e89-9749-105c915542d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.088013] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef44bc97-f88c-4c0b-81b5-73ff2272bf57 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1780.095380] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47445b9d-0ab8-46b4-a97e-c18243b77500 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.125425] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180683MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1780.125578] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.125774] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.282336] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance cbe4e626-f063-4877-985f-b3e36c161c9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.282593] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.282798] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.282994] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.283192] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.283371] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.283551] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.283758] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.283973] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.284198] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1780.296384] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.296619] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1780.296766] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1780.313190] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1780.329027] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1780.329263] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1780.341599] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1780.361746] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1780.508869] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcea85fc-fb1d-45af-b5c4-fe76f5d37eb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.516806] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1f4f59b8-761d-4a42-bd5d-8a6d25db10cd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.547963] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a979f6-0693-49c8-b8cd-ff1b8dba9e5a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.555655] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09110e9b-fe44-4af1-afd0-1e226f4a88e9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.569910] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.578810] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.593513] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1780.593513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.468s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.052380] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.075965] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 1789.075965] env[68798]: value = "domain-c8" [ 1789.075965] env[68798]: _type = "ClusterComputeResource" [ 1789.075965] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1789.077839] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0ee087-0ee1-4c60-a56f-c817bf50c66c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.095807] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 10 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1789.095989] env[68798]: DEBUG nova.compute.manager 
[None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid cbe4e626-f063-4877-985f-b3e36c161c9e {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096211] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 17cce398-d2f8-47a6-b714-c4e54caec516 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096366] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid ca976c34-4eb0-46aa-a243-91401f842c32 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096518] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 36980008-f639-4c88-afcf-0dba40420b87 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096671] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid b430775d-fcfb-4233-bc78-87d279e82fb5 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096822] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 71c99eda-d55d-4d60-92d2-a5553c3c3760 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.096979] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid a4e41ed1-2b39-4475-bd13-1680ff46ff6f {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.097178] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.097332] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 57a34323-ebdd-4495-ab62-f7b82ab804d9 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.097481] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 6f0e769a-33db-48c6-9a88-cceb310cb819 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1789.097818] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "cbe4e626-f063-4877-985f-b3e36c161c9e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.098158] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "17cce398-d2f8-47a6-b714-c4e54caec516" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.098397] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "ca976c34-4eb0-46aa-a243-91401f842c32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.098603] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "36980008-f639-4c88-afcf-0dba40420b87" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.098798] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "b430775d-fcfb-4233-bc78-87d279e82fb5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.098996] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.099195] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.099390] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.099577] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.099767] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "6f0e769a-33db-48c6-9a88-cceb310cb819" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.773017] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.773322] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
:: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.850414] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "6f0e769a-33db-48c6-9a88-cceb310cb819" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.319677] env[68798]: WARNING oslo_vmware.rw_handles [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1818.319677] env[68798]: ERROR oslo_vmware.rw_handles [ 1818.320418] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1818.322495] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1818.322814] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Copying Virtual Disk [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/b7e05e19-4e00-4fbb-b755-60395e6b0b99/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1818.323062] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a6d3013-331d-4b6a-a004-4e726cb83b72 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.331704] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 1818.331704] env[68798]: value = "task-4217693" [ 1818.331704] env[68798]: _type = "Task" [ 1818.331704] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.339995] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.841954] env[68798]: DEBUG oslo_vmware.exceptions [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1818.842217] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.842800] env[68798]: ERROR nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.842800] env[68798]: Faults: ['InvalidArgument'] [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Traceback (most recent call last): [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] yield resources [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self.driver.spawn(context, instance, image_meta, [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1818.842800] env[68798]: ERROR nova.compute.manager 
[instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self._fetch_image_if_missing(context, vi) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] image_cache(vi, tmp_image_ds_loc) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] vm_util.copy_virtual_disk( [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] session._wait_for_task(vmdk_copy_task) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return self.wait_for_task(task_ref) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return evt.wait() [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] result = hub.switch() [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return self.greenlet.switch() [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self.f(*self.args, **self.kw) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] raise exceptions.translate_fault(task_info.error) [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Faults: ['InvalidArgument'] [ 1818.842800] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] [ 1818.844126] env[68798]: INFO 
nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Terminating instance [ 1818.844794] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.845017] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1818.845273] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5be9ed9b-bbb9-4b60-9267-a36b22c8a033 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.847562] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1818.847759] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1818.848606] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d55b850-e4c1-47f2-91b9-cce90c4d1dd2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.856559] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1818.856877] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93cc3ddb-7f92-4256-bb29-5bf2faa835ff {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.859293] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1818.859465] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1818.860437] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05fdaf6a-8f8b-46a6-acbb-51e1ee0ae603 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.865964] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1818.865964] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]527aebbf-f250-01b6-8800-5b12917a7906" [ 1818.865964] env[68798]: _type = "Task" [ 1818.865964] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.874023] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]527aebbf-f250-01b6-8800-5b12917a7906, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.939587] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1818.939852] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1818.940055] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleting the datastore file [datastore1] cbe4e626-f063-4877-985f-b3e36c161c9e {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1818.940332] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-871e1317-2a3f-40ae-ac38-3b1ce17e4c09 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.947644] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 1818.947644] env[68798]: value = "task-4217695" [ 1818.947644] env[68798]: _type = "Task" [ 1818.947644] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.956572] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.377230] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1819.377609] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating directory with path [datastore1] vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.377763] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b766efb7-afe9-4e0e-8c7c-57834d988be7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.389783] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created directory with path [datastore1] vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.390012] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Fetch image to [datastore1] vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1819.390193] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1819.390927] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b73624-5037-49f0-9a5a-2a23dc2f734e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.398036] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3973e528-5a61-43cc-9127-aba2a68b18a4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.407451] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b71849-d0ef-443c-b41e-9f87d47bfb92 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.438928] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca93947a-aeb8-48cf-8f0f-bfaf9f74484d {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.445014] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fe701e88-a631-4824-847f-59b7ed99dcaa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.455839] env[68798]: DEBUG oslo_vmware.api [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067909} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.456125] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1819.456360] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1819.456551] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1819.456726] env[68798]: INFO nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1819.458980] env[68798]: DEBUG nova.compute.claims [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1819.459115] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.459327] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.468859] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1819.524083] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1819.585489] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1819.587019] env[68798]: DEBUG oslo_vmware.rw_handles [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1819.713015] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb62e6c-fd78-4d9d-89f7-d8a2cb23ebfd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.721633] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d48034f-ab86-4196-b553-1fe52f7d7740 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.751740] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97f82e9-7779-4031-97b7-8ccbf3d9b189 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.759765] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99dc5625-6d37-49d2-abbb-da2eb2819e05 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.773622] env[68798]: DEBUG nova.compute.provider_tree [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.784432] env[68798]: DEBUG nova.scheduler.client.report [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1819.798624] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.339s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.799190] env[68798]: ERROR nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1819.799190] env[68798]: Faults: ['InvalidArgument'] [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Traceback (most recent call last): [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: 
cbe4e626-f063-4877-985f-b3e36c161c9e] self.driver.spawn(context, instance, image_meta, [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self._fetch_image_if_missing(context, vi) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] image_cache(vi, tmp_image_ds_loc) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] vm_util.copy_virtual_disk( [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] session._wait_for_task(vmdk_copy_task) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return self.wait_for_task(task_ref) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return evt.wait() [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] result = hub.switch() [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] return self.greenlet.switch() [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] self.f(*self.args, **self.kw) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] raise exceptions.translate_fault(task_info.error) [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Faults: ['InvalidArgument'] [ 1819.799190] env[68798]: ERROR nova.compute.manager [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] [ 1819.800194] env[68798]: DEBUG nova.compute.utils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1819.801502] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Build of instance cbe4e626-f063-4877-985f-b3e36c161c9e was re-scheduled: A specified parameter was not correct: fileType [ 1819.801502] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1819.801868] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1819.802054] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1819.802231] env[68798]: DEBUG nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1819.802443] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1820.089655] env[68798]: DEBUG nova.network.neutron [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.100249] env[68798]: INFO nova.compute.manager [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Took 0.30 seconds to deallocate network for instance. [ 1820.200348] env[68798]: INFO nova.scheduler.client.report [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted allocations for instance cbe4e626-f063-4877-985f-b3e36c161c9e [ 1820.223999] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7d736ce9-4d96-4fa1-8090-d5f84ffd149e tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 592.948s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.225158] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 397.251s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.225485] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.225720] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1820.225891] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.228050] env[68798]: INFO nova.compute.manager [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Terminating instance [ 1820.229882] env[68798]: DEBUG nova.compute.manager [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1820.230088] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1820.230570] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e7d905e-f718-4ac6-9ae9-3911cfecf0e4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.241380] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9485072-4c06-4263-90eb-db963d70d604 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.252007] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1820.274477] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cbe4e626-f063-4877-985f-b3e36c161c9e could not be found. [ 1820.274702] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1820.274880] env[68798]: INFO nova.compute.manager [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1820.275153] env[68798]: DEBUG oslo.service.loopingcall [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.275377] env[68798]: DEBUG nova.compute.manager [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1820.275489] env[68798]: DEBUG nova.network.neutron [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1820.297566] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.297820] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.299379] env[68798]: INFO nova.compute.claims [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.302086] env[68798]: DEBUG nova.network.neutron [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.311348] env[68798]: INFO nova.compute.manager [-] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] Took 0.04 seconds to deallocate network for instance. [ 1820.412008] env[68798]: DEBUG oslo_concurrency.lockutils [None req-28562a7b-d6db-4eaf-9bee-c1b3aeeb7c89 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.187s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.412748] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 31.315s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.412946] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: cbe4e626-f063-4877-985f-b3e36c161c9e] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1820.413152] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "cbe4e626-f063-4877-985f-b3e36c161c9e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.505381] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cc3247-6cb5-4b62-903d-2fc55457682f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.513451] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb51926-48f6-4c84-a91a-feb1cd11f48b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.545437] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66adbe4f-0139-4d20-b99c-03d89952de0e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.553641] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4534d8b-ffd6-4140-b02c-3d9f3ceb638f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.568747] env[68798]: DEBUG nova.compute.provider_tree [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1820.579385] env[68798]: DEBUG nova.scheduler.client.report [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1820.594453] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.297s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.595048] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1820.628706] env[68798]: DEBUG nova.compute.utils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1820.630384] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1820.630576] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1820.640070] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1820.710096] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1820.727869] env[68798]: DEBUG nova.policy [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65a77c1ac9ef415a96027982683a9143', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e85d147593f4fd09907df0b9531de65', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1820.738248] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1820.738521] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1820.738719] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1820.738913] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1820.739080] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1820.739235] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1820.739448] env[68798]: DEBUG nova.virt.hardware [None 
req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1820.739609] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1820.739792] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1820.740151] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1820.740151] env[68798]: DEBUG nova.virt.hardware [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1820.741024] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21d071c-d08d-4be2-a18f-cdf42fe2bf10 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.749841] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7332cd59-f3a2-4279-9050-12a10adf6d9a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.064083] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Successfully created port: 3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1821.825690] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Successfully updated port: 3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1821.837620] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.837791] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 
tempest-DeleteServersTestJSON-415004131-project-member] Acquired lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.837947] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1821.896740] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1822.128351] env[68798]: DEBUG nova.compute.manager [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Received event network-vif-plugged-3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1822.128618] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Acquiring lock "ce408b93-3713-4819-8c80-63735d9a5467-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.128907] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Lock "ce408b93-3713-4819-8c80-63735d9a5467-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.129168] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Lock "ce408b93-3713-4819-8c80-63735d9a5467-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.129411] env[68798]: DEBUG nova.compute.manager [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] No waiting events found dispatching network-vif-plugged-3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1822.129647] env[68798]: WARNING nova.compute.manager [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Received unexpected event network-vif-plugged-3e5c979a-ffa5-4f33-af35-49f98eabb3d7 for instance with vm_state building and task_state spawning. 
[ 1822.129901] env[68798]: DEBUG nova.compute.manager [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Received event network-changed-3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1822.130111] env[68798]: DEBUG nova.compute.manager [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Refreshing instance network info cache due to event network-changed-3e5c979a-ffa5-4f33-af35-49f98eabb3d7. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1822.130358] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Acquiring lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.169287] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Updating instance_info_cache with network_info: [{"id": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "address": "fa:16:3e:ff:ef:c1", "network": {"id": "62909948-dd00-4450-8d65-a77715843abd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-421236368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e85d147593f4fd09907df0b9531de65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e5c979a-ff", "ovs_interfaceid": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.183142] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Releasing lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.183473] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance network_info: |[{"id": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "address": "fa:16:3e:ff:ef:c1", "network": {"id": "62909948-dd00-4450-8d65-a77715843abd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-421236368-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e85d147593f4fd09907df0b9531de65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e5c979a-ff", "ovs_interfaceid": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1822.183779] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Acquired lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.183961] env[68798]: DEBUG nova.network.neutron [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Refreshing network info cache for port 3e5c979a-ffa5-4f33-af35-49f98eabb3d7 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1822.186056] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:ef:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ce1511e-1eaa-45c6-a1ef-2b714c814fa1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e5c979a-ffa5-4f33-af35-49f98eabb3d7', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1822.193537] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Creating folder: Project (8e85d147593f4fd09907df0b9531de65). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1822.196639] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-341a5ae3-55fb-450c-9db4-0ec7b48e7cb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.209132] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Created folder: Project (8e85d147593f4fd09907df0b9531de65) in parent group-v834492. [ 1822.209499] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Creating folder: Instances. Parent ref: group-v834592. 
{{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1822.209876] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3eff51c3-332e-4f1d-b065-a6af9adb90a4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.223775] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Created folder: Instances in parent group-v834592. [ 1822.224080] env[68798]: DEBUG oslo.service.loopingcall [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.224295] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1822.224600] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45d253cb-aad1-48b6-b918-6f7c0656e075 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.248113] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1822.248113] env[68798]: value = "task-4217698" [ 1822.248113] env[68798]: _type = "Task" [ 1822.248113] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.256679] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217698, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.558392] env[68798]: DEBUG nova.network.neutron [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Updated VIF entry in instance network info cache for port 3e5c979a-ffa5-4f33-af35-49f98eabb3d7. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1822.558908] env[68798]: DEBUG nova.network.neutron [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Updating instance_info_cache with network_info: [{"id": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "address": "fa:16:3e:ff:ef:c1", "network": {"id": "62909948-dd00-4450-8d65-a77715843abd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-421236368-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e85d147593f4fd09907df0b9531de65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ce1511e-1eaa-45c6-a1ef-2b714c814fa1", "external-id": "nsx-vlan-transportzone-300", "segmentation_id": 300, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e5c979a-ff", "ovs_interfaceid": "3e5c979a-ffa5-4f33-af35-49f98eabb3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.571663] env[68798]: DEBUG oslo_concurrency.lockutils [req-27187ed8-8f68-435e-81cf-dde50e37feb9 req-8f790578-8ef4-40f1-9acc-fc95e80dc7f4 service nova] Releasing lock "refresh_cache-ce408b93-3713-4819-8c80-63735d9a5467" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.758921] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217698, 'name': CreateVM_Task, 'duration_secs': 0.305292} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.760081] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1822.760081] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.760271] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.760607] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.760863] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e27177df-f1ff-4515-8094-8eef33e56e02 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.765792] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for the task: (returnval){ [ 1822.765792] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52baac96-de08-8416-edd5-7211a4356e33" [ 1822.765792] env[68798]: _type = "Task" [ 1822.765792] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.775587] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52baac96-de08-8416-edd5-7211a4356e33, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.277998] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.278385] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1823.278701] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.048308] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.048627] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.048777] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.048930] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1832.048729] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.043701] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.048451] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.048639] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1833.048779] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1833.073022] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.073292] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.073512] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.073741] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.073964] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.074193] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.074405] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.074607] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.074822] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.075046] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1833.075232] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1833.075741] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1838.047993] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.048360] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.060018] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.060277] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.060462] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.060623] 
env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1841.061770] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1371e0f-aa9b-41f2-ac49-8daa87cae26a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.071291] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d58e8ca-175c-4663-92a2-e6b6edfef7cb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.086537] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c4922b-d0eb-4f3c-be4f-5afc23e3ee26 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.092872] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88aed516-54b5-4222-b620-d4bbe6ea2a42 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.121121] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180717MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1841.121272] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.121464] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.195126] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 17cce398-d2f8-47a6-b714-c4e54caec516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195304] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ca976c34-4eb0-46aa-a243-91401f842c32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195433] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195561] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195683] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195802] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.195932] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.196086] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.196212] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.196327] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1841.207589] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1841.207928] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1841.208286] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1841.343141] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570590e0-3612-4372-a830-0fabfad9e27a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.351105] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2615d491-1334-47e0-9b94-2dc26ad13eca {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.382636] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042c5789-818e-487b-b0a3-196e2fb01257 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.390422] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb9c59a-b734-496e-9f04-442a3048331e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.403584] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.414116] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1841.427500] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1841.427687] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.306s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.744652] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "ce408b93-3713-4819-8c80-63735d9a5467" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.338026] env[68798]: WARNING oslo_vmware.rw_handles [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1868.338026] env[68798]: ERROR oslo_vmware.rw_handles [ 1868.338754] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1868.340527] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1868.340772] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/2ae7ed80-a96e-4e1b-8231-9b45f6b360bc/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1868.341066] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10080b15-5899-4016-9f93-e9d1e47502bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.350197] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1868.350197] env[68798]: value = "task-4217699" [ 1868.350197] env[68798]: _type = "Task" [ 1868.350197] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.361518] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': task-4217699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.860797] env[68798]: DEBUG oslo_vmware.exceptions [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1868.861178] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.861785] env[68798]: ERROR nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.861785] env[68798]: Faults: ['InvalidArgument'] [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Traceback (most recent call last): [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] yield resources [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.driver.spawn(context, instance, image_meta, [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 
539, in spawn [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._fetch_image_if_missing(context, vi) [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] image_cache(vi, tmp_image_ds_loc) [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] vm_util.copy_virtual_disk( [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] session._wait_for_task(vmdk_copy_task) [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.wait_for_task(task_ref) [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return evt.wait() [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] result = hub.switch() [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.greenlet.switch() [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.f(*self.args, **self.kw) [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] raise exceptions.translate_fault(task_info.error) [ 1868.861785] env[68798]: ERROR nova.compute.manager 
[instance: 17cce398-d2f8-47a6-b714-c4e54caec516] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Faults: ['InvalidArgument'] [ 1868.861785] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] [ 1868.863372] env[68798]: INFO nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Terminating instance [ 1868.864778] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.864778] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1868.864778] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c20b4aad-e5c0-4e61-b4f8-4909b21960b2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.866874] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.867084] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.867451] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1868.874456] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1868.874668] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1868.875999] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-873c34f7-9fd8-447c-b231-eedb2228eba3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.884126] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1868.884126] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]526900e9-f039-a315-bb1d-2b5790c83d34" [ 1868.884126] env[68798]: _type = "Task" [ 1868.884126] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.892308] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]526900e9-f039-a315-bb1d-2b5790c83d34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.900752] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1868.988343] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.997938] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.998404] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1868.998608] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1868.999977] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c714eb0-0843-4db7-9f9a-d908502dfeb2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.009484] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1869.009765] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76f670a6-1a6c-4384-8070-72a77998f763 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.085764] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1869.085979] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1869.086192] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleting the datastore file [datastore1] 17cce398-d2f8-47a6-b714-c4e54caec516 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1869.086452] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e7b63dd-3246-40eb-bd5f-3aa56941c107 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.093469] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1869.093469] env[68798]: value = "task-4217701" [ 1869.093469] env[68798]: _type = "Task" [ 1869.093469] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.101877] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': task-4217701, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.395037] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1869.395414] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Creating directory with path [datastore1] vmware_temp/21a8c23c-dc66-4dfc-bfb4-4912b9631832/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.395574] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f38fe67b-04a9-47c8-84b3-1af6f5de3c77 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.407810] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Created directory with path [datastore1] vmware_temp/21a8c23c-dc66-4dfc-bfb4-4912b9631832/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.408015] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Fetch image to [datastore1] vmware_temp/21a8c23c-dc66-4dfc-bfb4-4912b9631832/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1869.408202] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/21a8c23c-dc66-4dfc-bfb4-4912b9631832/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1869.409020] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfc19f5-7773-418b-922b-8ec5c0b1034f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.416330] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ce4ed7-beb6-4e47-a19a-c9fac08c8d1c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.425731] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cda5fc-7cb2-4b0a-b436-06fd8b2310d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.456603] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c293e11-b698-47a1-9995-ee84f8212cb9 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.462950] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-db74d225-11e0-4acb-bf6d-4778ec4f9b25 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.485401] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1869.603952] env[68798]: DEBUG oslo_vmware.api [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': task-4217701, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045879} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.604638] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1869.604638] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1869.604797] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1869.605044] env[68798]: INFO nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1869.605315] env[68798]: DEBUG oslo.service.loopingcall [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.605526] env[68798]: DEBUG nova.compute.manager [-] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1869.608320] env[68798]: DEBUG nova.compute.claims [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1869.608320] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.608320] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.611454] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.612200] env[68798]: ERROR nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = getattr(controller, method)(*args, **kwargs) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._get(image_id) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] resp, body = self.http_client.get(url, headers=header) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.request(url, 'GET', **kwargs) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._handle_response(resp) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise exc.from_response(resp, resp.content) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] During handling of the above exception, another exception occurred: [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] yield resources [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self.driver.spawn(context, instance, image_meta, [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._fetch_image_if_missing(context, vi) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image_fetch(context, vi, tmp_image_ds_loc) [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] images.fetch_image( [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1869.612200] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] metadata = IMAGE_API.get(context, image_ref) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return session.show(context, image_id, [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] _reraise_translated_image_exception(image_id) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise new_exc.with_traceback(exc_trace) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = getattr(controller, method)(*args, **kwargs) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._get(image_id) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] resp, body = self.http_client.get(url, headers=header) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.request(url, 'GET', **kwargs) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._handle_response(resp) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise exc.from_response(resp, resp.content) [ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. 
[ 1869.613690] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1869.613690] env[68798]: INFO nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Terminating instance [ 1869.614484] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.614484] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.614592] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.614783] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.614842] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1869.615856] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ed9d07b-1ccf-4f44-b829-19a5a2b807ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.626897] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.627101] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1869.628191] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32d95157-72a1-4134-bdbb-c6a5175b9cda {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.634313] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for the task: (returnval){ [ 1869.634313] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5273fc0e-a4b1-b6be-3502-6d2b10a0e9ca" [ 1869.634313] env[68798]: _type = "Task" [ 1869.634313] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.644953] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1869.655839] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1869.655839] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Creating directory with path [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.656302] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-457e42c6-73a7-405b-8622-526600503d35 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.678465] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Created directory with path [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.678685] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Fetch image to [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1869.678859] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 
36980008-f639-4c88-afcf-0dba40420b87] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1869.679676] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aab16a-a38e-4a1f-b301-a1d48273ac17 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.687336] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249f8997-ddc3-4b31-a088-bc04991a140d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.700069] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2b37d5-8db0-4671-92c1-44072cc58052 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.734643] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.738650] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159ec045-6533-45bb-a5d8-9e501a99defc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.744543] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.744955] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1869.745168] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1869.747833] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0594aa65-27fa-4823-a2c2-6b94f0d30650 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.750810] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e21dce10-0346-4a01-9dac-8d1a0b5256cf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.759008] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1869.759268] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e16fc84-a1c8-4da5-984a-9d93a7dc3922 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.781520] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1869.788682] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1869.788860] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1869.789060] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleting the datastore file [datastore1] ca976c34-4eb0-46aa-a243-91401f842c32 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1869.789321] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdc746c8-958a-4760-8741-83c2045f210d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.796460] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 
tempest-ServerShowV247Test-142210285-project-member] Waiting for the task: (returnval){ [ 1869.796460] env[68798]: value = "task-4217703" [ 1869.796460] env[68798]: _type = "Task" [ 1869.796460] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.805491] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': task-4217703, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.839194] env[68798]: DEBUG oslo_vmware.rw_handles [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1869.841806] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d379f1d-81da-4a21-9f44-7e3475c90a6b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.898686] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b727dc-69f3-4485-824c-9bdc12955e06 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.905882] env[68798]: DEBUG oslo_vmware.rw_handles [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1869.906101] env[68798]: DEBUG oslo_vmware.rw_handles [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1869.932262] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657e5a8d-7d81-4d55-99bf-b85c2e3b22ab {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.940650] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a5db39-94f1-4b05-884e-c10eb6de75ce {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.954795] env[68798]: DEBUG nova.compute.provider_tree [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.969917] env[68798]: DEBUG nova.scheduler.client.report [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1869.991649] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.992214] env[68798]: ERROR nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1869.992214] env[68798]: Faults: ['InvalidArgument'] [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Traceback (most recent call last): [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.driver.spawn(context, instance, image_meta, [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._fetch_image_if_missing(context, vi) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] image_cache(vi, tmp_image_ds_loc) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] vm_util.copy_virtual_disk( [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] session._wait_for_task(vmdk_copy_task) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.wait_for_task(task_ref) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return evt.wait() [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] result = hub.switch() [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.greenlet.switch() [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.f(*self.args, **self.kw) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] raise exceptions.translate_fault(task_info.error) [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1869.992214] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Faults: ['InvalidArgument'] [ 1869.992214] 
env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] [ 1869.993930] env[68798]: DEBUG nova.compute.utils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1869.994799] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Build of instance 17cce398-d2f8-47a6-b714-c4e54caec516 was re-scheduled: A specified parameter was not correct: fileType [ 1869.994799] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1869.995190] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1869.995415] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.995565] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.995732] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.025530] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1870.090249] env[68798]: DEBUG nova.network.neutron [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.099976] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.100237] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1870.100428] env[68798]: DEBUG nova.compute.manager [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Skipping network deallocation for instance since networking was not requested. {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1870.197354] env[68798]: INFO nova.scheduler.client.report [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleted allocations for instance 17cce398-d2f8-47a6-b714-c4e54caec516 [ 1870.221019] env[68798]: DEBUG oslo_concurrency.lockutils [None req-8748eab4-f282-4263-85b4-fd0d520d9d0d tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.892s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.221252] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 237.011s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.221471] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "17cce398-d2f8-47a6-b714-c4e54caec516-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.221668] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.221830] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.224149] env[68798]: INFO nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Terminating instance [ 1870.225713] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.225902] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.226105] env[68798]: DEBUG nova.network.neutron [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.230939] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1870.253560] env[68798]: DEBUG nova.network.neutron [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1870.286491] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.286744] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.288242] env[68798]: INFO nova.compute.claims [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.306476] env[68798]: DEBUG oslo_vmware.api [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Task: {'id': task-4217703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034722} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.306717] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1870.306902] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1870.307088] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1870.307264] env[68798]: INFO nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1870.307658] env[68798]: DEBUG oslo.service.loopingcall [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.308050] env[68798]: DEBUG nova.compute.manager [-] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network deallocation for instance since networking was not requested. {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1870.310187] env[68798]: DEBUG nova.compute.claims [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1870.310364] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.319395] env[68798]: DEBUG nova.network.neutron [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.328890] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-17cce398-d2f8-47a6-b714-c4e54caec516" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.329426] env[68798]: DEBUG nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1870.329626] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1870.330128] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a423578a-45ea-4b25-bab7-f178513a1a4b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.341329] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6064b3-f93b-4113-abd8-9e3887a8292c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.375499] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 17cce398-d2f8-47a6-b714-c4e54caec516 could not be found. 
[ 1870.375707] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1870.375888] env[68798]: INFO nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1870.376224] env[68798]: DEBUG oslo.service.loopingcall [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1870.376382] env[68798]: DEBUG nova.compute.manager [-] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1870.376479] env[68798]: DEBUG nova.network.neutron [-] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1870.478598] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdbbc51-9a1f-468d-9cbc-5c2f9a5218bc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.481110] env[68798]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1870.481352] env[68798]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1870.481848] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-ba6bd05b-81db-4be3-be4a-bb8b27ccaa73'] [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.481848] env[68798]: ERROR oslo.service.loopingcall [ 1870.483211] env[68798]: ERROR nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1870.490380] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7eb02b-b0fb-4def-98d5-4ca866bfcf52 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.523115] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a53d11f-c5ad-4b08-84ad-ca3f41e43ee0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.526228] env[68798]: ERROR nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Traceback (most recent call last): [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] exception_handler_v20(status_code, error_body) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] raise client_exc(message=error_message, [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Neutron server returns request_ids: ['req-ba6bd05b-81db-4be3-be4a-bb8b27ccaa73'] [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] During handling of the above exception, another exception occurred: [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Traceback (most recent call last): [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._delete_instance(context, instance, bdms) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._shutdown_instance(context, instance, bdms) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._try_deallocate_network(context, instance, requested_networks) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] with excutils.save_and_reraise_exception(): [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.force_reraise() [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] raise self.value [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] _deallocate_network_with_retries() [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return evt.wait() [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] result = hub.switch() [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.greenlet.switch() [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] result = func(*self.args, **self.kw) [ 1870.526228] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] 
result = f(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._deallocate_network( [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self.network_api.deallocate_for_instance( [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] data = neutron.list_ports(**search_opts) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.list('ports', self.ports_path, retrieve_all, [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] for r in self._pagination(collection, path, **params): [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] res = self.get(path, params=params) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.retry_request("GET", action, body=body, [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] return self.do_request(method, action, body=body, [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] ret = obj(*args, **kwargs) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] self._handle_fault_response(status_code, replybody, resp) [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.527503] env[68798]: ERROR nova.compute.manager [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] [ 1870.535659] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b0b541-7c6b-442a-9a55-c84ae0084663 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.552444] env[68798]: DEBUG nova.compute.provider_tree [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.557507] env[68798]: DEBUG oslo_concurrency.lockutils [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.336s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.558646] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 81.461s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.558837] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] During sync_power_state the instance has a pending task 
(deleting). Skip. [ 1870.559015] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "17cce398-d2f8-47a6-b714-c4e54caec516" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.561259] env[68798]: DEBUG nova.scheduler.client.report [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.574539] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.575282] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1870.577457] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.267s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.623785] env[68798]: DEBUG nova.compute.utils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.625314] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1870.625485] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1870.632162] env[68798]: INFO nova.compute.manager [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: 17cce398-d2f8-47a6-b714-c4e54caec516] Successfully reverted task state from None on failure for instance. [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server [None req-59c5c1db-40ce-4212-8ef5-7887e356ab15 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-ba6bd05b-81db-4be3-be4a-bb8b27ccaa73'] [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1870.638032] env[68798]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1870.638032] env[68798]: ERROR 
oslo_messaging.rpc.server return f(*args, **kwargs) [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1870.638032] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1870.639745] env[68798]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1870.639745] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
297, in do_request [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1870.641881] env[68798]: ERROR oslo_messaging.rpc.server [ 1870.641881] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1870.698072] env[68798]: DEBUG nova.policy [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4b5f64aab104581ace03fbacba5e7f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b707226298f42a2af5b7fec8e680b16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1870.712044] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1870.741816] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1870.742069] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1870.742232] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1870.742418] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1870.742609] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1870.742764] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1870.742975] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1870.743153] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1870.743368] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 
tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1870.743613] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1870.743797] env[68798]: DEBUG nova.virt.hardware [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1870.744720] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f232c15a-6702-4c3a-a5f6-88a62c0e84d6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.755305] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325004d3-fa34-4274-82ae-3c3f25f34d69 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.781355] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ae6f34-615e-4a9f-a845-bf7f2f6f1708 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.789454] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c16a0-8b27-4d44-8593-490e292fc62e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.821283] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2245664-9a8a-4bfb-865f-7f23d5c256b0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.829465] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d4ac26-3fb7-45ab-9187-49dea8e84a51 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.843356] env[68798]: DEBUG nova.compute.provider_tree [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1870.854067] env[68798]: DEBUG nova.scheduler.client.report [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1870.870872] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.871737] env[68798]: ERROR nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = getattr(controller, method)(*args, **kwargs) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._get(image_id) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] resp, body = self.http_client.get(url, headers=header) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.request(url, 'GET', **kwargs) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._handle_response(resp) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: 
ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise exc.from_response(resp, resp.content) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] During handling of the above exception, another exception occurred: [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self.driver.spawn(context, instance, image_meta, [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._fetch_image_if_missing(context, vi) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image_fetch(context, vi, tmp_image_ds_loc) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] images.fetch_image( [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] metadata = IMAGE_API.get(context, image_ref) [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1870.871737] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return session.show(context, image_id, [ 1870.872931] env[68798]: ERROR 
nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] _reraise_translated_image_exception(image_id) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise new_exc.with_traceback(exc_trace) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = getattr(controller, method)(*args, **kwargs) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._get(image_id) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] resp, body = self.http_client.get(url, headers=header) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.request(url, 'GET', **kwargs) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self._handle_response(resp) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise exc.from_response(resp, resp.content) [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] nova.exception.ImageNotAuthorized: Not authorized for 
image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. [ 1870.872931] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1870.872931] env[68798]: DEBUG nova.compute.utils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1870.874281] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Build of instance ca976c34-4eb0-46aa-a243-91401f842c32 was re-scheduled: Not authorized for image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70. {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1870.874756] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1870.874984] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.875148] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.875313] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.912670] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1871.020689] env[68798]: DEBUG nova.network.neutron [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.032617] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.032688] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1871.032848] env[68798]: DEBUG nova.compute.manager [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Skipping network deallocation for instance since networking was not requested. {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1871.119869] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Successfully created port: d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1871.127571] env[68798]: INFO nova.scheduler.client.report [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Deleted allocations for instance ca976c34-4eb0-46aa-a243-91401f842c32 [ 1871.154054] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fb0fa8d4-7999-49ec-bf2f-4cabb5247db4 tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "ca976c34-4eb0-46aa-a243-91401f842c32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.657s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.154343] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "ca976c34-4eb0-46aa-a243-91401f842c32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.857s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.154573] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "ca976c34-4eb0-46aa-a243-91401f842c32-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.154785] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "ca976c34-4eb0-46aa-a243-91401f842c32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.154955] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "ca976c34-4eb0-46aa-a243-91401f842c32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.158643] env[68798]: INFO nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Terminating instance [ 1871.163620] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquiring lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.163808] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Acquired lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.163988] env[68798]: DEBUG nova.network.neutron [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1871.203116] env[68798]: DEBUG nova.network.neutron [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1871.347286] env[68798]: DEBUG nova.network.neutron [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.356398] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Releasing lock "refresh_cache-ca976c34-4eb0-46aa-a243-91401f842c32" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.356822] env[68798]: DEBUG nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1871.357032] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1871.357605] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5df086a-d4dc-465d-abbf-8f7d0962969e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.368037] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff16f9b2-edc6-4091-ba45-a950e4db0215 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.406324] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ca976c34-4eb0-46aa-a243-91401f842c32 could not be found. [ 1871.406540] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1871.406719] env[68798]: INFO nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1871.406969] env[68798]: DEBUG oslo.service.loopingcall [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.407210] env[68798]: DEBUG nova.compute.manager [-] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1871.407609] env[68798]: DEBUG nova.network.neutron [-] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1871.526227] env[68798]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68798) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1871.526504] env[68798]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-55792843-4281-4674-96ba-86173c3df260'] [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.527018] env[68798]: ERROR oslo.service.loopingcall [ 1871.528551] env[68798]: ERROR nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.568088] env[68798]: ERROR nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] exception_handler_v20(status_code, error_body) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise client_exc(message=error_message, [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Neutron server returns request_ids: ['req-55792843-4281-4674-96ba-86173c3df260'] [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] During handling of the above exception, another exception occurred: [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Traceback (most recent call last): [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File 
"/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._delete_instance(context, instance, bdms) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._shutdown_instance(context, instance, bdms) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._try_deallocate_network(context, instance, requested_networks) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] with excutils.save_and_reraise_exception(): [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self.force_reraise() [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise self.value [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] _deallocate_network_with_retries() [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return evt.wait() [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = hub.switch() [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.greenlet.switch() [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = 
func(*self.args, **self.kw) [ 1871.568088] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] result = f(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._deallocate_network( [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self.network_api.deallocate_for_instance( [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] data = neutron.list_ports(**search_opts) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.list('ports', self.ports_path, retrieve_all, [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] for r in self._pagination(collection, path, **params): [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] res = self.get(path, params=params) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.retry_request("GET", action, body=body, [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] return self.do_request(method, action, body=body, [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] ret = obj(*args, **kwargs) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] self._handle_fault_response(status_code, replybody, resp) [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.570105] env[68798]: ERROR nova.compute.manager [instance: ca976c34-4eb0-46aa-a243-91401f842c32] [ 1871.600472] env[68798]: DEBUG oslo_concurrency.lockutils [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Lock "ca976c34-4eb0-46aa-a243-91401f842c32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.446s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.602041] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "ca976c34-4eb0-46aa-a243-91401f842c32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 82.503s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.602041] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] During sync_power_state the instance has a pending task (deleting). Skip. 
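The recurring failure mode in the entries above is the decorator at nova/network/neutron.py (line 196 in this deployment) catching the Neutron 401 and re-raising it at line 212 as nova.exception.NeutronAdminCredentialConfigurationInvalid, which then aborts _deallocate_network_with_retries and leaves the instance in ERROR. The following is only a minimal sketch of that translation pattern, not the actual Nova source; the stand-in exception classes and the translate_neutron_auth_errors/list_ports names here are illustrative.

    # Sketch of the 401 -> NeutronAdminCredentialConfigurationInvalid translation
    # pattern visible in the traceback above. Names below are illustrative
    # stand-ins, not the real nova or neutronclient implementations.
    import functools

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class of the same name."""

    def translate_neutron_auth_errors(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                # The service credentials in nova.conf [neutron] were rejected,
                # so retrying with the same token cannot succeed; surface a
                # configuration error instead of the raw HTTP failure.
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_neutron_auth_errors
    def list_ports(**search_opts):
        # A real call would go through neutronclient's list_ports(); here we
        # simulate the 401 seen in the log to show the translated exception.
        raise Unauthorized("401: The request you have made requires authentication.")

Calling list_ports(device_id='ca976c34-4eb0-46aa-a243-91401f842c32') in this sketch raises NeutronAdminCredentialConfigurationInvalid, mirroring how the compute manager's network deallocation path fails in the log once Neutron rejects the admin token; fixing the [neutron] credentials in nova.conf is the remedy the error message points at.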
[ 1871.602390] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "ca976c34-4eb0-46aa-a243-91401f842c32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.656213] env[68798]: INFO nova.compute.manager [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] [instance: ca976c34-4eb0-46aa-a243-91401f842c32] Successfully reverted task state from None on failure for instance. [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server [None req-b9f35189-6ffb-4671-875c-64380177de1b tempest-ServerShowV247Test-142210285 tempest-ServerShowV247Test-142210285-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-55792843-4281-4674-96ba-86173c3df260'] [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1871.659967] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server raise self.value [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1871.661672] env[68798]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1871.663334] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1871.663334] 
env[68798]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1871.663334] env[68798]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1871.663334] env[68798]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1871.663334] env[68798]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1871.663334] env[68798]: ERROR oslo_messaging.rpc.server [ 1871.842098] env[68798]: DEBUG nova.compute.manager [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Received event network-vif-plugged-d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1871.842360] env[68798]: DEBUG oslo_concurrency.lockutils [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] Acquiring lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.842621] env[68798]: DEBUG oslo_concurrency.lockutils [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.842833] env[68798]: DEBUG oslo_concurrency.lockutils [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.843102] env[68798]: DEBUG nova.compute.manager [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] No waiting events found dispatching network-vif-plugged-d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1871.843315] env[68798]: WARNING nova.compute.manager [req-f6e25397-666f-4539-9fcf-1146e3f6cd17 req-7f1a402f-99ec-49bc-951f-581716d336ce service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Received unexpected event network-vif-plugged-d3afc40b-ee96-4649-890f-05b6574dcba9 for instance with vm_state building and task_state spawning. 
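[editor's note] For readers tracing the failure above: the frames at nova/network/neutron.py:196 and :212 show Nova's neutronclient wrapper translating the 401 Unauthorized from Keystone into NeutronAdminCredentialConfigurationInvalid. The following is an illustrative, self-contained sketch of that exception-translation pattern only; the exception classes here are stand-ins, not Nova's or neutronclient's actual implementations.

    # Sketch (assumption): how a 401 from the networking client gets surfaced as a
    # configuration error instead of being retried. Stand-in classes, not Nova code.
    import functools

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""

    def translate_unauthorized(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                # The service credentials themselves were rejected, so retrying the
                # request cannot help; report it as a deployment/config problem.
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_unauthorized
    def list_ports(**search_opts):
        # Simulates the neutronclient call that failed in the traceback above.
        raise Unauthorized("The request you have made requires authentication.")

Calling list_ports() here raises NeutronAdminCredentialConfigurationInvalid, which matches the final exception reported by the RPC server in the log.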
[ 1872.124762] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Successfully updated port: d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1872.137017] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.137184] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquired lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.137338] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1872.179880] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance cache missing network info. {{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1872.366334] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Updating instance_info_cache with network_info: [{"id": "d3afc40b-ee96-4649-890f-05b6574dcba9", "address": "fa:16:3e:1e:0b:65", "network": {"id": "13ce71fc-1c48-4b41-a7de-20d9ac239512", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1014472592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b707226298f42a2af5b7fec8e680b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3afc40b-ee", "ovs_interfaceid": "d3afc40b-ee96-4649-890f-05b6574dcba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.381861] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 
tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Releasing lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.382197] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance network_info: |[{"id": "d3afc40b-ee96-4649-890f-05b6574dcba9", "address": "fa:16:3e:1e:0b:65", "network": {"id": "13ce71fc-1c48-4b41-a7de-20d9ac239512", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1014472592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b707226298f42a2af5b7fec8e680b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3afc40b-ee", "ovs_interfaceid": "d3afc40b-ee96-4649-890f-05b6574dcba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1872.382879] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:0b:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3afc40b-ee96-4649-890f-05b6574dcba9', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1872.391189] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Creating folder: Project (0b707226298f42a2af5b7fec8e680b16). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1872.391983] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8deb7691-ccb7-4ba5-98de-953414612aac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.406521] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Created folder: Project (0b707226298f42a2af5b7fec8e680b16) in parent group-v834492. 
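[editor's note] The "Instance VIF info" entry a few lines above is derived from one element of the network_info list logged just before it. A minimal sketch of that mapping, using only the field names visible in this log (this is not the vmwareapi driver's actual helper):

    # Sketch (assumption): deriving the logged VIF info dict from one network_info entry.
    def vif_info_from_port(vif):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],            # 'br-int' in the log
            'mac_address': vif['address'],                        # 'fa:16:3e:1e:0b:65'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],   # NSX logical switch UUID
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                                # the Neutron port UUID
            'vif_model': 'vmxnet3',                               # adapter type used above
        }

Feeding the port dict shown in the instance_info_cache update through this function reproduces the VIF info list that build_virtual_machine logs before the Folder.CreateFolder calls.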
[ 1872.406739] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Creating folder: Instances. Parent ref: group-v834595. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1872.406993] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c07148bb-7568-4196-b500-0071e56fe03c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.419793] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Created folder: Instances in parent group-v834595. [ 1872.420101] env[68798]: DEBUG oslo.service.loopingcall [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.420462] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1872.420619] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c183059f-f848-4c3e-8a97-c699ddcff3d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.441134] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1872.441134] env[68798]: value = "task-4217706" [ 1872.441134] env[68798]: _type = "Task" [ 1872.441134] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.449881] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217706, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.953076] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217706, 'name': CreateVM_Task, 'duration_secs': 0.326332} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.953468] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1872.960707] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.960902] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.961236] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1872.961499] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31ad9057-23ea-4ed3-9a02-4cc269ae744d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.966760] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for the task: (returnval){ [ 1872.966760] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]529aa99b-5a76-2bfd-20a7-279266a4540e" [ 1872.966760] env[68798]: _type = "Task" [ 1872.966760] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.975201] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]529aa99b-5a76-2bfd-20a7-279266a4540e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.477690] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.478028] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1873.478391] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.867340] env[68798]: DEBUG nova.compute.manager [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Received event network-changed-d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1873.867537] env[68798]: DEBUG nova.compute.manager [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Refreshing instance network info cache due to event network-changed-d3afc40b-ee96-4649-890f-05b6574dcba9. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1873.867748] env[68798]: DEBUG oslo_concurrency.lockutils [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] Acquiring lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.868016] env[68798]: DEBUG oslo_concurrency.lockutils [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] Acquired lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.868195] env[68798]: DEBUG nova.network.neutron [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Refreshing network info cache for port d3afc40b-ee96-4649-890f-05b6574dcba9 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1874.161030] env[68798]: DEBUG nova.network.neutron [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Updated VIF entry in instance network info cache for port d3afc40b-ee96-4649-890f-05b6574dcba9. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1874.161407] env[68798]: DEBUG nova.network.neutron [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Updating instance_info_cache with network_info: [{"id": "d3afc40b-ee96-4649-890f-05b6574dcba9", "address": "fa:16:3e:1e:0b:65", "network": {"id": "13ce71fc-1c48-4b41-a7de-20d9ac239512", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1014472592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b707226298f42a2af5b7fec8e680b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3afc40b-ee", "ovs_interfaceid": "d3afc40b-ee96-4649-890f-05b6574dcba9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.172476] env[68798]: DEBUG oslo_concurrency.lockutils [req-daf72d6a-f56b-46f5-ad33-b5365f32c431 req-4021776f-5e6a-4911-8ffd-a8306881acd2 service nova] Releasing lock "refresh_cache-de697c7a-bcc4-4d01-a9ec-8467e89d4ada" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.431289] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1892.048253] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.043763] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.048749] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.048749] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1893.048749] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1894.049314] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.049705] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.050107] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1895.050107] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1895.073825] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074114] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074197] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074278] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074371] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074494] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074645] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074796] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.074922] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1895.075055] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1899.048559] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1901.045892] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.049100] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.061906] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.062177] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.062359] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.062517] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1903.063650] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d33d4a-dd9b-4bd9-b657-cca15d13330a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.072924] env[68798]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cce370-b493-4a76-833c-5d0ec277d418 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.087401] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc02c28-1850-47ee-a975-ad898ec72f5c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.093999] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9b9f67-2639-4c85-93e7-d1fcdde877b8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.124160] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180757MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1903.124308] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.124504] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.199276] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 36980008-f639-4c88-afcf-0dba40420b87 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.199434] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.199562] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.199684] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.199813] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.200317] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.200317] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.200317] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.200317] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1903.200536] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1903.200635] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1903.318068] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971be0d5-9f42-4341-aa96-c1b918888674 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.326389] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98b28f0-6f9d-40b6-9661-fb501d51e19a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.355739] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c727180-0974-4604-b36b-46aee7f33a68 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.363510] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e40ec7-ebc6-42cb-b80c-efb3dfe2de99 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.376738] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1903.385303] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1903.399054] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1903.399286] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.275s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.788285] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1915.788285] env[68798]: ERROR oslo_vmware.rw_handles [ 1915.788942] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1915.790989] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1915.791265] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Copying Virtual Disk [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/f05176f6-408c-4e63-9a59-3afcead9a432/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1915.791554] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d945f9f-93a6-49df-9284-b279297b4f1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.799792] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for the task: (returnval){ [ 1915.799792] 
env[68798]: value = "task-4217707" [ 1915.799792] env[68798]: _type = "Task" [ 1915.799792] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.809397] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Task: {'id': task-4217707, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.310235] env[68798]: DEBUG oslo_vmware.exceptions [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1916.310514] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.311087] env[68798]: ERROR nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.311087] env[68798]: Faults: ['InvalidArgument'] [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] Traceback (most recent call last): [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] yield resources [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self.driver.spawn(context, instance, image_meta, [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self._fetch_image_if_missing(context, vi) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1916.311087] 
env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] image_cache(vi, tmp_image_ds_loc) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] vm_util.copy_virtual_disk( [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] session._wait_for_task(vmdk_copy_task) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return self.wait_for_task(task_ref) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return evt.wait() [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] result = hub.switch() [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return self.greenlet.switch() [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self.f(*self.args, **self.kw) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] raise exceptions.translate_fault(task_info.error) [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] Faults: ['InvalidArgument'] [ 1916.311087] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] [ 1916.312263] env[68798]: INFO nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Terminating instance [ 1916.314280] env[68798]: DEBUG 
nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1916.314475] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1916.314759] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.314960] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.315706] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad15ff6-6f4c-4de5-9949-96c9777a79a3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.318524] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6098f4de-942d-4921-afd7-89502a5477fa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.324667] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1916.324920] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91a71f7a-7fe2-46cb-8f4d-3f474f72098a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.327221] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.327399] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1916.328455] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b531a703-226a-4d41-b754-e25203a36121 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.333835] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1916.333835] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52ef4844-1584-dad1-8487-f9f04b752ed0" [ 1916.333835] env[68798]: _type = "Task" [ 1916.333835] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.343542] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52ef4844-1584-dad1-8487-f9f04b752ed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.400550] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1916.400762] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1916.400937] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Deleting the datastore file [datastore1] 36980008-f639-4c88-afcf-0dba40420b87 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1916.401229] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4781cfe-d399-4ff4-b355-e217c49b318e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.407030] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for the task: (returnval){ [ 1916.407030] env[68798]: value = "task-4217709" [ 1916.407030] env[68798]: _type = "Task" [ 1916.407030] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.415353] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Task: {'id': task-4217709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.844070] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1916.844519] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating directory with path [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.844592] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88a76ef1-2a24-446a-8338-e7d1624f03c1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.856013] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Created directory with path [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.856241] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Fetch image to [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1916.856415] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1916.857176] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd4e025-9a93-4c21-8a39-98dcefe9a69d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.864451] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013e26b1-5e84-433d-ad91-d5f63cd99142 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.874064] env[68798]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da7a557-a9d8-455e-863b-6faac36c653d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.906067] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe71a44-901e-4688-b999-98453752bc84 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.921125] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dc077eb0-957b-41f2-bb85-184dc4d6fae3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.923162] env[68798]: DEBUG oslo_vmware.api [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Task: {'id': task-4217709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068795} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.923425] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1916.923605] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1916.923776] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1916.923959] env[68798]: INFO nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1916.926188] env[68798]: DEBUG nova.compute.claims [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1916.926364] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.926578] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.954866] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1917.081399] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1917.137266] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51aa27c-f241-404c-90ec-496dffedc032 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.142065] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1917.142280] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1917.146313] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d240a2-792e-498a-827d-b8e3856d3a43 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.176069] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8029edc-2d57-4535-86bb-e98722668a2f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.184204] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa89be1-b159-49b1-8ccb-bf2c5bc71675 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.197358] env[68798]: DEBUG nova.compute.provider_tree [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1917.206268] env[68798]: DEBUG nova.scheduler.client.report [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1917.220209] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.220760] env[68798]: ERROR nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.220760] env[68798]: Faults: ['InvalidArgument'] [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] Traceback (most recent call last): [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self.driver.spawn(context, instance, image_meta, [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 
36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self._fetch_image_if_missing(context, vi) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] image_cache(vi, tmp_image_ds_loc) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] vm_util.copy_virtual_disk( [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] session._wait_for_task(vmdk_copy_task) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return self.wait_for_task(task_ref) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return evt.wait() [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] result = hub.switch() [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] return self.greenlet.switch() [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] self.f(*self.args, **self.kw) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] raise 
exceptions.translate_fault(task_info.error) [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] Faults: ['InvalidArgument'] [ 1917.220760] env[68798]: ERROR nova.compute.manager [instance: 36980008-f639-4c88-afcf-0dba40420b87] [ 1917.221636] env[68798]: DEBUG nova.compute.utils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1917.223194] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Build of instance 36980008-f639-4c88-afcf-0dba40420b87 was re-scheduled: A specified parameter was not correct: fileType [ 1917.223194] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1917.223485] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1917.223658] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1917.223835] env[68798]: DEBUG nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1917.223999] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1917.684851] env[68798]: DEBUG nova.network.neutron [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.700964] env[68798]: INFO nova.compute.manager [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Took 0.48 seconds to deallocate network for instance. [ 1917.817548] env[68798]: INFO nova.scheduler.client.report [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Deleted allocations for instance 36980008-f639-4c88-afcf-0dba40420b87 [ 1917.841267] env[68798]: DEBUG oslo_concurrency.lockutils [None req-96c5ea7b-9fb5-40b2-94ff-c8c792c21d2b tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.197s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.841610] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 431.792s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.841873] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Acquiring lock "36980008-f639-4c88-afcf-0dba40420b87-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.842180] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 
tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.842368] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.844734] env[68798]: INFO nova.compute.manager [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Terminating instance [ 1917.846855] env[68798]: DEBUG nova.compute.manager [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1917.847063] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1917.847571] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0a5599a-aaa4-48ab-bc88-999490cd6414 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.857811] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954d794f-f243-4be3-b0d0-664cbc29d0e1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.888081] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 36980008-f639-4c88-afcf-0dba40420b87 could not be found. [ 1917.888338] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.888528] env[68798]: INFO nova.compute.manager [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1917.888878] env[68798]: DEBUG oslo.service.loopingcall [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.889102] env[68798]: DEBUG nova.compute.manager [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1917.889203] env[68798]: DEBUG nova.network.neutron [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1917.918174] env[68798]: DEBUG nova.network.neutron [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.927219] env[68798]: INFO nova.compute.manager [-] [instance: 36980008-f639-4c88-afcf-0dba40420b87] Took 0.04 seconds to deallocate network for instance. [ 1918.035517] env[68798]: DEBUG oslo_concurrency.lockutils [None req-04e1efcd-5e61-4e97-833a-2b84a6805ba6 tempest-ServersNegativeTestMultiTenantJSON-601976384 tempest-ServersNegativeTestMultiTenantJSON-601976384-project-member] Lock "36980008-f639-4c88-afcf-0dba40420b87" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.194s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.036451] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "36980008-f639-4c88-afcf-0dba40420b87" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 128.938s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.036652] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 36980008-f639-4c88-afcf-0dba40420b87] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1918.036827] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "36980008-f639-4c88-afcf-0dba40420b87" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.188278] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "538aeb6d-0aca-4d72-af14-859f4397514b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.188631] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.203985] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1936.258523] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.258789] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.260414] env[68798]: INFO nova.compute.claims [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1936.429107] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0833f2ce-e82a-4e2f-b107-48bbe6720b32 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.439108] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877c60c0-489a-4137-995d-ef403c08f61a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.469724] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f569849-43f0-4171-a9bd-e36f4bd62bcf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.477705] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf006c47-fcb3-4749-8308-c5cc5b25dda4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.491723] env[68798]: DEBUG nova.compute.provider_tree [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.500949] env[68798]: DEBUG nova.scheduler.client.report [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1936.516979] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.258s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.517842] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1936.557881] env[68798]: DEBUG nova.compute.utils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1936.559647] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Allocating IP information in the background. 
{{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1936.559904] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1936.619676] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1936.625562] env[68798]: DEBUG nova.policy [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2028f2f661549a6b1607fed075b9a35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba3727c039d41daacf0d1d32f7261d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 1936.688575] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1936.715946] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1936.716204] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1936.716361] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1936.716557] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1936.716705] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1936.716843] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1936.717067] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1936.717236] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1936.717453] 
env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1936.717632] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1936.717867] env[68798]: DEBUG nova.virt.hardware [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1936.718741] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b614a5-306b-4875-a5f0-74074c60b9a2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.727462] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76b62f9-00e4-45e6-9d8d-d665c5840618 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.231342] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Successfully created port: c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1937.811200] env[68798]: DEBUG nova.compute.manager [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Received event network-vif-plugged-c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1937.811594] env[68798]: DEBUG oslo_concurrency.lockutils [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] Acquiring lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.811700] env[68798]: DEBUG oslo_concurrency.lockutils [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] Lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.812085] env[68798]: DEBUG oslo_concurrency.lockutils [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] Lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1937.814542] env[68798]: DEBUG nova.compute.manager [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] No waiting events found dispatching network-vif-plugged-c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1937.814843] env[68798]: WARNING nova.compute.manager [req-1f26f2c4-5e09-4a44-9df9-f4802e39efb4 req-2c4fe206-341f-4a18-9d96-7fe8511aaf78 service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Received unexpected event network-vif-plugged-c372309c-3f86-4159-b96c-d6f47b6f4be0 for instance with vm_state building and task_state spawning. [ 1937.888781] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Successfully updated port: c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1937.927811] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.928112] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.928254] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1937.973531] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1938.210184] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Updating instance_info_cache with network_info: [{"id": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "address": "fa:16:3e:90:70:f3", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc372309c-3f", "ovs_interfaceid": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.225113] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.225437] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance network_info: |[{"id": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "address": "fa:16:3e:90:70:f3", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc372309c-3f", "ovs_interfaceid": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1938.225891] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:70:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c372309c-3f86-4159-b96c-d6f47b6f4be0', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1938.233437] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating folder: Project (4ba3727c039d41daacf0d1d32f7261d6). Parent ref: group-v834492. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1938.234230] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c659afb0-1638-4148-a5f5-13bf362140fe {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.246347] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created folder: Project (4ba3727c039d41daacf0d1d32f7261d6) in parent group-v834492. [ 1938.246613] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating folder: Instances. Parent ref: group-v834598. {{(pid=68798) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1938.246894] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f558983c-52c0-429d-99f2-39e2b9c5eefd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.257690] env[68798]: INFO nova.virt.vmwareapi.vm_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created folder: Instances in parent group-v834598. [ 1938.257943] env[68798]: DEBUG oslo.service.loopingcall [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1938.258160] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1938.258376] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe40c293-f815-422d-8c5c-ca0ebcfc3051 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.279710] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1938.279710] env[68798]: value = "task-4217712" [ 1938.279710] env[68798]: _type = "Task" [ 1938.279710] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.287689] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217712, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.790012] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217712, 'name': CreateVM_Task, 'duration_secs': 0.28851} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.790201] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1938.790996] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.791205] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.791561] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1938.791819] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-821e6571-22b8-4c4b-88e5-64143216d12e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.796584] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 1938.796584] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f1a222-3c76-26ad-6f09-912393a4bf43" [ 1938.796584] env[68798]: _type = "Task" [ 1938.796584] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.804275] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52f1a222-3c76-26ad-6f09-912393a4bf43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.309072] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.309389] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1939.309730] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.838813] env[68798]: DEBUG nova.compute.manager [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Received event network-changed-c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1939.839015] env[68798]: DEBUG nova.compute.manager [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Refreshing instance network info cache due to event network-changed-c372309c-3f86-4159-b96c-d6f47b6f4be0. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1939.839234] env[68798]: DEBUG oslo_concurrency.lockutils [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] Acquiring lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.839384] env[68798]: DEBUG oslo_concurrency.lockutils [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] Acquired lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.839591] env[68798]: DEBUG nova.network.neutron [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Refreshing network info cache for port c372309c-3f86-4159-b96c-d6f47b6f4be0 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1940.135108] env[68798]: DEBUG nova.network.neutron [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Updated VIF entry in instance network info cache for port c372309c-3f86-4159-b96c-d6f47b6f4be0. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1940.135479] env[68798]: DEBUG nova.network.neutron [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Updating instance_info_cache with network_info: [{"id": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "address": "fa:16:3e:90:70:f3", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc372309c-3f", "ovs_interfaceid": "c372309c-3f86-4159-b96c-d6f47b6f4be0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.145151] env[68798]: DEBUG oslo_concurrency.lockutils [req-9869e01f-bd00-478b-bc9e-675fc7a48d78 req-309bd402-6792-48e3-85a2-ad5a47410bda service nova] Releasing lock "refresh_cache-538aeb6d-0aca-4d72-af14-859f4397514b" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.399721] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.048550] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.048905] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.049026] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1954.045217] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1954.047799] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.049065] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1955.049065] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1955.049430] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1955.070212] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070373] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070495] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070621] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070744] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070873] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.070993] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.071125] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.071243] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1955.071362] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1955.071881] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.048638] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.050901] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1963.063489] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.063489] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.063673] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.063826] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1963.065267] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045a05a6-62c6-44d0-b71e-cbb36475ee65 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.073978] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8054375-1500-42aa-9314-90c0ce0971bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.087976] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9558e5e-6fd6-44a9-a508-d964ffa5c7bc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.094277] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c863f3cb-44ac-42c3-96f0-a77bc8f569c0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.122589] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180757MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1963.122721] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.122933] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.193251] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance b430775d-fcfb-4233-bc78-87d279e82fb5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.193478] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.193620] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.193745] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.193868] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.193990] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.194124] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.194245] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.194361] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1963.194545] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1963.194680] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1963.300027] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1e3935-0db8-4d48-aaa6-ddc78cc63513 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.309190] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96afc898-ff63-45f4-be42-0ef90f32c754 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.338644] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b62939-46e9-48d2-bc4f-0934e2b35499 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.346320] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b67106-d84d-4822-b54f-cf4dded9bbf2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.359407] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.368157] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 
855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1963.383408] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1963.383611] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.261s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.638848] env[68798]: WARNING oslo_vmware.rw_handles [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1964.638848] env[68798]: ERROR oslo_vmware.rw_handles [ 1964.639967] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1964.641555] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Caching image {{(pid=68798) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1964.642103] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Copying Virtual Disk [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/055698d1-7dda-487c-b973-4bc1ededdaef/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1964.642200] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2f09df0-80df-4584-bbc9-35b1e0737c8d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.650815] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1964.650815] env[68798]: value = "task-4217713" [ 1964.650815] env[68798]: _type = "Task" [ 1964.650815] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.659735] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217713, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.162530] env[68798]: DEBUG oslo_vmware.exceptions [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1965.162989] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1965.163416] env[68798]: ERROR nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1965.163416] env[68798]: Faults: ['InvalidArgument'] [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Traceback (most recent call last): [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] yield resources [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self.driver.spawn(context, instance, image_meta, [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self._fetch_image_if_missing(context, vi) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] image_cache(vi, tmp_image_ds_loc) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] vm_util.copy_virtual_disk( [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] session._wait_for_task(vmdk_copy_task) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return self.wait_for_task(task_ref) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return evt.wait() [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] result = hub.switch() [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return self.greenlet.switch() [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self.f(*self.args, **self.kw) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] raise exceptions.translate_fault(task_info.error) [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Faults: ['InvalidArgument'] [ 1965.163416] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] [ 1965.164219] env[68798]: INFO nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Terminating instance [ 1965.165409] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1965.165631] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1965.165867] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f95ac49d-e7a4-4205-bf77-545ad61b3553 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.169903] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1965.170171] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1965.170976] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61398815-f90c-488f-97a0-2dc700cc06d7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.175179] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1965.175367] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1965.176479] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bc0865f-13f9-4d9d-8181-f90c4adeb535 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.180680] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1965.181332] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7c53458-6234-4ffe-bea2-94a549b5355f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.183802] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for the task: (returnval){ [ 1965.183802] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5299ea4f-cc61-9610-c0f1-5b6e269fe341" [ 1965.183802] env[68798]: _type = "Task" [ 1965.183802] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.193509] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5299ea4f-cc61-9610-c0f1-5b6e269fe341, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.259579] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1965.259882] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1965.260084] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleting the datastore file [datastore1] b430775d-fcfb-4233-bc78-87d279e82fb5 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1965.260420] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4efd0204-3821-47b7-b785-8cf1fb0f9e2a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.267462] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for the task: (returnval){ [ 1965.267462] env[68798]: value = "task-4217715" [ 1965.267462] env[68798]: _type = "Task" [ 1965.267462] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.275785] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217715, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.694407] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1965.694865] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Creating directory with path [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1965.694964] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6df82b4b-d311-40e7-896c-60daa7d0e2ab {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.709363] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Created directory with path [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1965.709587] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Fetch image to [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1965.709696] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1965.710567] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e96695-4b5f-4ed0-9e3a-33e0ef554feb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.717327] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e81dfe-f164-4527-a505-8008c0e59c66 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.726524] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e598aadc-b291-45c2-8843-e579251d0253 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.757845] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db448c3-2f17-4286-be98-8a25b6440c8a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.764554] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0450f26-95c1-419c-b48f-c428e0817f85 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.779577] env[68798]: DEBUG oslo_vmware.api [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Task: {'id': task-4217715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073435} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.780021] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1965.780319] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1965.780590] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1965.780857] env[68798]: INFO nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Took 0.61 seconds to destroy the instance on the hypervisor. 
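
The teardown just above follows from the CopyVirtualDisk_Task failure earlier in this section ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']): the driver's image-caching path starts a vCenter copy task and then blocks in oslo.vmware's wait_for_task(), which re-raises the task error as VimFaultException. A minimal sketch of that call pattern, assuming an oslo.vmware VMwareAPISession; the function name and arguments below are illustrative, not Nova's exact vm_util.copy_virtual_disk() code:

    from oslo_vmware import exceptions as vexc

    def copy_sparse_vmdk(session, source_path, dest_path, dc_ref, disk_mgr_ref):
        """Start a CopyVirtualDisk_Task on vCenter and block until it completes."""
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr_ref,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        try:
            # wait_for_task() polls the task and raises a translated exception
            # once vCenter marks it as failed.
            return session.wait_for_task(task)
        except vexc.VimFaultException:
            # Surfaces as "A specified parameter was not correct: fileType"
            # (Faults: ['InvalidArgument']); the caller lets it propagate so
            # the build is aborted and re-scheduled, as logged below.
            raise

When wait_for_task() raises, the spawn fails, the claim is aborted and the build is re-scheduled, which is the sequence recorded in the entries that follow.
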
[ 1965.783782] env[68798]: DEBUG nova.compute.claims [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1965.784078] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.784446] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.793082] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1965.850268] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1965.908303] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1965.908506] env[68798]: DEBUG oslo_vmware.rw_handles [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1965.998662] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56ada77-fafc-46c7-acad-3599ff4f86e9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.006804] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb5ba20-0838-45fb-bcaa-b9e88ce46a22 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.036834] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cae2e8a-391c-4513-b374-a3798071a6e7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.044231] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15167e34-b69b-4b7e-85a5-d51707abda95 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.056976] env[68798]: DEBUG nova.compute.provider_tree [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1966.065718] env[68798]: DEBUG nova.scheduler.client.report [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1966.079548] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.080123] env[68798]: ERROR nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.080123] env[68798]: Faults: ['InvalidArgument'] [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Traceback (most recent call last): [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1966.080123] env[68798]: ERROR 
nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self.driver.spawn(context, instance, image_meta, [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self._fetch_image_if_missing(context, vi) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] image_cache(vi, tmp_image_ds_loc) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] vm_util.copy_virtual_disk( [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] session._wait_for_task(vmdk_copy_task) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return self.wait_for_task(task_ref) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return evt.wait() [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] result = hub.switch() [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] return self.greenlet.switch() [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] self.f(*self.args, **self.kw) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] raise exceptions.translate_fault(task_info.error) [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Faults: ['InvalidArgument'] [ 1966.080123] env[68798]: ERROR nova.compute.manager [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] [ 1966.080905] env[68798]: DEBUG nova.compute.utils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1966.082236] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Build of instance b430775d-fcfb-4233-bc78-87d279e82fb5 was re-scheduled: A specified parameter was not correct: fileType [ 1966.082236] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1966.082603] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1966.082773] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1966.082943] env[68798]: DEBUG nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1966.083124] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1966.403789] env[68798]: DEBUG nova.network.neutron [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.417021] env[68798]: INFO nova.compute.manager [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Took 0.33 seconds to deallocate network for instance. [ 1966.523851] env[68798]: INFO nova.scheduler.client.report [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Deleted allocations for instance b430775d-fcfb-4233-bc78-87d279e82fb5 [ 1966.549010] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bbd646c5-6708-43ec-9d42-25290ac39a68 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 619.640s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.549325] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 423.883s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.549582] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Acquiring lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.550522] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.550522] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.553033] env[68798]: INFO nova.compute.manager [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Terminating instance [ 1966.554574] env[68798]: DEBUG nova.compute.manager [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1966.554768] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1966.555294] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e32448c9-b7d9-4161-8387-072d21989673 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.565947] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dffec8-cf1c-48f1-8f4d-dc5bf6f1897a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.599591] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b430775d-fcfb-4233-bc78-87d279e82fb5 could not be found. [ 1966.599823] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1966.600071] env[68798]: INFO nova.compute.manager [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1966.600382] env[68798]: DEBUG oslo.service.loopingcall [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.600632] env[68798]: DEBUG nova.compute.manager [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1966.600761] env[68798]: DEBUG nova.network.neutron [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1966.626301] env[68798]: DEBUG nova.network.neutron [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1966.634824] env[68798]: INFO nova.compute.manager [-] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] Took 0.03 seconds to deallocate network for instance. [ 1966.723870] env[68798]: DEBUG oslo_concurrency.lockutils [None req-25c13558-c418-4c4c-9504-bfe026f76772 tempest-AttachVolumeTestJSON-1002096979 tempest-AttachVolumeTestJSON-1002096979-project-member] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.174s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.724933] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 177.626s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.725281] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: b430775d-fcfb-4233-bc78-87d279e82fb5] During sync_power_state the instance has a pending task (deleting). Skip. 
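[editor's note] The lock trace above ("Acquiring lock ... / acquired ... waited Ns / released ... held Ns", all pointing at oslo_concurrency/lockutils.py) is emitted by oslo.concurrency's synchronized wrapper, which is how build_and_run_instance and terminate_instance serialize on a per-instance lock. The following is a minimal illustrative sketch of that pattern only, assuming a plain in-process lock named after the instance UUID; it is not Nova's actual compute-manager code.

```python
# Minimal sketch (not Nova's actual code) of the oslo.concurrency pattern behind
# the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" DEBUG
# lines in the log above.
import logging
import time

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)

# Instance UUID taken from the log above; any lock name works the same way.
INSTANCE_UUID = "b430775d-fcfb-4233-bc78-87d279e82fb5"


@lockutils.synchronized(INSTANCE_UUID)
def locked_do_build_and_run_instance():
    # While this body runs, any other caller synchronized on the same name
    # blocks; that waiting time is what shows up as "waited 423.883s" before
    # the terminate path could acquire the lock in the log.
    time.sleep(0.1)


if __name__ == "__main__":
    locked_do_build_and_run_instance()
```

With DEBUG logging enabled, running this locally prints the same acquire/wait/hold/release trace format seen throughout the log, which is useful when reading how long an instance build held its lock versus how long a competing terminate request waited on it.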
[ 1966.725487] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "b430775d-fcfb-4233-bc78-87d279e82fb5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.340531] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.275426] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "5a65c46e-989d-4a8f-9387-86cde7725173" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.275788] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.286488] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2011.336769] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.337031] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2011.338502] env[68798]: INFO nova.compute.claims [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2011.381311] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2011.507869] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106c3a20-304a-4277-9391-e3b1abfa3fda {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.516776] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3937e0-0fb4-4a93-abe0-1cccfe5d733d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.547144] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe802df-33a3-45de-8cbc-72598e7f7a36 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.555431] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a51729-0c03-4235-a644-7f4f96fa0e01 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.569467] env[68798]: DEBUG nova.compute.provider_tree [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.579955] env[68798]: DEBUG nova.scheduler.client.report [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2011.596384] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.596954] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2011.630241] env[68798]: DEBUG nova.compute.utils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2011.632317] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2011.632529] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2011.640901] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2011.711713] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2011.727483] env[68798]: DEBUG nova.policy [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e7ee34608848b39cc2a7114e7d682d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efeea8a59294c7ca8b499dda555a3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 2011.742775] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2011.743142] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2011.743321] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2011.743514] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2011.743663] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2011.743813] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2011.744040] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 
tempest-ServersTestJSON-1349294209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2011.744205] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2011.744374] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2011.744535] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2011.744710] env[68798]: DEBUG nova.virt.hardware [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2011.745638] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035fd1ec-8cdb-4864-831c-41b99747030a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.754594] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516ed05e-3ef2-4e0b-90fb-90a9a9edb6c1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.167195] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Successfully created port: 89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2012.835417] env[68798]: DEBUG nova.compute.manager [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Received event network-vif-plugged-89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2012.835680] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] Acquiring lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.835899] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] Lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.836079] env[68798]: DEBUG oslo_concurrency.lockutils [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] Lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.836248] env[68798]: DEBUG nova.compute.manager [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] No waiting events found dispatching network-vif-plugged-89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2012.836412] env[68798]: WARNING nova.compute.manager [req-0c92ac8c-02f8-4c6c-b14b-b5e0837fbb34 req-2bbaf364-3850-4045-bea1-6e333dee869e service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Received unexpected event network-vif-plugged-89b9d964-4811-491c-bfa9-fe66da3170c1 for instance with vm_state building and task_state spawning. [ 2012.921857] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Successfully updated port: 89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2012.936929] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.937112] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.937358] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2013.006963] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2013.047876] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.048129] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.048283] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2013.240661] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Updating instance_info_cache with network_info: [{"id": "89b9d964-4811-491c-bfa9-fe66da3170c1", "address": "fa:16:3e:b5:4c:5d", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b9d964-48", "ovs_interfaceid": "89b9d964-4811-491c-bfa9-fe66da3170c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.257181] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.257580] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance network_info: |[{"id": "89b9d964-4811-491c-bfa9-fe66da3170c1", "address": "fa:16:3e:b5:4c:5d", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b9d964-48", "ovs_interfaceid": "89b9d964-4811-491c-bfa9-fe66da3170c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2013.258392] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:4c:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c6324fd-a761-417c-bc85-b6278daecfc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89b9d964-4811-491c-bfa9-fe66da3170c1', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2013.266077] env[68798]: DEBUG oslo.service.loopingcall [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2013.266565] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2013.266803] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9bd0a57-d519-4894-8b34-6c1b4eae0db3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.287239] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2013.287239] env[68798]: value = "task-4217716" [ 2013.287239] env[68798]: _type = "Task" [ 2013.287239] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.295584] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217716, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.388090] env[68798]: WARNING oslo_vmware.rw_handles [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2013.388090] env[68798]: ERROR oslo_vmware.rw_handles [ 2013.388090] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2013.390606] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2013.390978] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Copying Virtual Disk [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/0581aa29-287c-421f-be6b-930911d40c27/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2013.391378] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-163f835e-b14d-4197-b118-8d602a927f9b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.400586] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 
tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for the task: (returnval){ [ 2013.400586] env[68798]: value = "task-4217717" [ 2013.400586] env[68798]: _type = "Task" [ 2013.400586] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.410984] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Task: {'id': task-4217717, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.797956] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217716, 'name': CreateVM_Task, 'duration_secs': 0.29884} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.798204] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2013.798973] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.799186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.799498] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2013.799763] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a143568b-6cab-4860-a0aa-5141e90d7a5a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.804742] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2013.804742] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]523c4d1f-9b15-b368-d290-f810c840281a" [ 2013.804742] env[68798]: _type = "Task" [ 2013.804742] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.812852] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]523c4d1f-9b15-b368-d290-f810c840281a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.911524] env[68798]: DEBUG oslo_vmware.exceptions [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2013.911944] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2013.912541] env[68798]: ERROR nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2013.912541] env[68798]: Faults: ['InvalidArgument'] [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Traceback (most recent call last): [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] yield resources [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self.driver.spawn(context, instance, image_meta, [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self._fetch_image_if_missing(context, vi) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] image_cache(vi, tmp_image_ds_loc) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] vm_util.copy_virtual_disk( [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] session._wait_for_task(vmdk_copy_task) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] return self.wait_for_task(task_ref) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] return evt.wait() [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] result = hub.switch() [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] return self.greenlet.switch() [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self.f(*self.args, **self.kw) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] raise exceptions.translate_fault(task_info.error) [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Faults: ['InvalidArgument'] [ 2013.912541] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] [ 2013.913367] env[68798]: INFO nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Terminating instance [ 2013.915457] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.915457] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 
tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2013.915457] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2a331e6-4b26-4f36-a818-efd5c251815f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.917450] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2013.917675] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2013.918497] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2665af7f-dc14-4b64-bf6d-e75e23813a9e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.928897] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2013.929148] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a9b2aed-c237-4a18-90f2-8e1f1483ee35 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.931766] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2013.931940] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2013.932999] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee705eef-b5d4-4138-81a5-7d0f301b890d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.938400] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for the task: (returnval){ [ 2013.938400] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b4fd44-51c2-0aa8-8743-bbecbcc54a0a" [ 2013.938400] env[68798]: _type = "Task" [ 2013.938400] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.946550] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52b4fd44-51c2-0aa8-8743-bbecbcc54a0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.012698] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2014.012896] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2014.013099] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Deleting the datastore file [datastore1] 71c99eda-d55d-4d60-92d2-a5553c3c3760 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2014.013388] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f9db2f4-666c-4248-9f93-5a7c14805476 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.021561] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for the task: (returnval){ [ 2014.021561] env[68798]: value = "task-4217719" [ 2014.021561] env[68798]: _type = "Task" [ 2014.021561] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.030812] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Task: {'id': task-4217719, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.317383] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.317567] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2014.317748] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.448998] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2014.450197] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Creating directory with path [datastore1] vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2014.450197] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d68d20e-6e06-462e-9c92-90aa05a96fb9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.461769] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Created directory with path [datastore1] vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2014.462016] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Fetch image to [datastore1] 
vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2014.462208] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2014.462982] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f97094-6f4e-424a-9016-8e90a3a5408e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.470337] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf1ec9-33b3-4815-81e3-f0b81b14d674 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.480575] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56292522-58e3-4a59-a680-0347735c39fb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.512192] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7a3598-fb06-4f8f-be19-0d78cf5f3e99 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.518509] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-074d54d4-1de3-432e-8870-fc6a2a46d6cf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.530456] env[68798]: DEBUG oslo_vmware.api [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Task: {'id': task-4217719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077223} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.530818] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2014.531106] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2014.531372] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2014.531615] env[68798]: INFO nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2014.533864] env[68798]: DEBUG nova.compute.claims [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2014.534055] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.534282] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.543184] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2014.597781] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2014.663031] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2014.663239] env[68798]: DEBUG oslo_vmware.rw_handles [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2014.756120] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a06f296-edd3-466c-b46e-aea11765fa64 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.764796] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2831fac-c8fc-4281-b579-b674b7b90c03 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.795330] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c56a21-d3f0-41ab-88a8-22f70a9bee0c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.803355] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6574b03-f1c4-47cc-8fab-8f2b383fb9ce {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.817113] env[68798]: DEBUG nova.compute.provider_tree [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2014.825824] env[68798]: DEBUG nova.scheduler.client.report [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2014.839883] env[68798]: DEBUG oslo_concurrency.lockutils [None 
req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.840500] env[68798]: ERROR nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2014.840500] env[68798]: Faults: ['InvalidArgument'] [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Traceback (most recent call last): [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self.driver.spawn(context, instance, image_meta, [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self._fetch_image_if_missing(context, vi) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] image_cache(vi, tmp_image_ds_loc) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] vm_util.copy_virtual_disk( [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] session._wait_for_task(vmdk_copy_task) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] return self.wait_for_task(task_ref) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 
71c99eda-d55d-4d60-92d2-a5553c3c3760] return evt.wait() [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] result = hub.switch() [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] return self.greenlet.switch() [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] self.f(*self.args, **self.kw) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] raise exceptions.translate_fault(task_info.error) [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Faults: ['InvalidArgument'] [ 2014.840500] env[68798]: ERROR nova.compute.manager [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] [ 2014.841310] env[68798]: DEBUG nova.compute.utils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2014.842857] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Build of instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 was re-scheduled: A specified parameter was not correct: fileType [ 2014.842857] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2014.843260] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2014.843432] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2014.843602] env[68798]: DEBUG nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2014.843762] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2014.865063] env[68798]: DEBUG nova.compute.manager [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Received event network-changed-89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2014.865191] env[68798]: DEBUG nova.compute.manager [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Refreshing instance network info cache due to event network-changed-89b9d964-4811-491c-bfa9-fe66da3170c1. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2014.865405] env[68798]: DEBUG oslo_concurrency.lockutils [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] Acquiring lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.865547] env[68798]: DEBUG oslo_concurrency.lockutils [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] Acquired lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.865708] env[68798]: DEBUG nova.network.neutron [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Refreshing network info cache for port 89b9d964-4811-491c-bfa9-fe66da3170c1 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2015.049399] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.049728] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2015.049728] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2015.069889] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Skipping network 
cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070075] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070180] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070304] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070427] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070547] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070710] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.070891] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2015.071038] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2015.071531] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.554235] env[68798]: DEBUG nova.network.neutron [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Updated VIF entry in instance network info cache for port 89b9d964-4811-491c-bfa9-fe66da3170c1. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2015.555029] env[68798]: DEBUG nova.network.neutron [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Updating instance_info_cache with network_info: [{"id": "89b9d964-4811-491c-bfa9-fe66da3170c1", "address": "fa:16:3e:b5:4c:5d", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89b9d964-48", "ovs_interfaceid": "89b9d964-4811-491c-bfa9-fe66da3170c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.565586] env[68798]: DEBUG oslo_concurrency.lockutils [req-9a723ea9-3dff-41b4-880e-04720abfe94b req-14b942bc-3980-4ab7-8ed1-19cc0c074eb4 service nova] Releasing lock "refresh_cache-5a65c46e-989d-4a8f-9387-86cde7725173" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.665746] env[68798]: DEBUG nova.network.neutron [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.681589] env[68798]: INFO nova.compute.manager [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Took 0.84 seconds to deallocate network for instance. 
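The "Updating instance_info_cache with network_info: [...]" record above dumps Nova's VIF model for port 89b9d964-4811-491c-bfa9-fe66da3170c1 as a JSON-like list. The short sketch below is a self-contained illustration (not Nova code) of pulling out the fields most often needed when tracing such a record: port ID, MAC, fixed IPs, VIF type, and MTU. The dict literal is abridged from the logged entry; summarize_vif is an illustrative helper name, not an existing API.

# Minimal sketch: summarizing one instance_info_cache network_info entry.
# The values below are copied (abridged) from the record logged above for
# port 89b9d964-4811-491c-bfa9-fe66da3170c1; keys not needed here are omitted.
vif = {
    "id": "89b9d964-4811-491c-bfa9-fe66da3170c1",
    "address": "fa:16:3e:b5:4c:5d",
    "type": "ovs",
    "devname": "tap89b9d964-48",
    "active": True,
    "network": {
        "id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f",
        "bridge": "br-int",
        "subnets": [
            {
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.5", "type": "fixed"}],
            }
        ],
        "meta": {"mtu": 8950},
    },
    "details": {"port_filter": True, "segmentation_id": 426},
}

def summarize_vif(vif: dict) -> str:
    """Return a one-line summary: port, MAC, fixed IPs, VIF type, MTU."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return (
        f"port={vif['id']} mac={vif['address']} ips={','.join(fixed_ips)} "
        f"vif_type={vif['type']} mtu={vif['network']['meta']['mtu']}"
    )

print(summarize_vif(vif))
# -> port=89b9d964-4811-491c-bfa9-fe66da3170c1 mac=fa:16:3e:b5:4c:5d ips=192.168.128.5 vif_type=ovs mtu=8950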
[ 2015.786665] env[68798]: INFO nova.scheduler.client.report [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Deleted allocations for instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 [ 2015.811150] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ef6c2583-879b-4e8d-a5a6-175ba3f77ed0 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 621.576s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.811422] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 425.772s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.811673] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Acquiring lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.811907] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.812099] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.814198] env[68798]: INFO nova.compute.manager [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Terminating instance [ 2015.816053] env[68798]: DEBUG nova.compute.manager [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Start destroying the instance on the hypervisor.
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2015.816257] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2015.816787] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7bec51ad-ac0f-452b-9cce-2852bb906a01 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.828256] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735e4e8d-7b82-4aa8-892d-7820023829f4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.859984] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 71c99eda-d55d-4d60-92d2-a5553c3c3760 could not be found. [ 2015.860217] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2015.860419] env[68798]: INFO nova.compute.manager [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2015.860727] env[68798]: DEBUG oslo.service.loopingcall [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2015.860943] env[68798]: DEBUG nova.compute.manager [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2015.861058] env[68798]: DEBUG nova.network.neutron [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2015.888471] env[68798]: DEBUG nova.network.neutron [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.897052] env[68798]: INFO nova.compute.manager [-] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] Took 0.04 seconds to deallocate network for instance.
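The lockutils records above show how the terminate path for 71c99eda-d55d-4d60-92d2-a5553c3c3760 is serialized: one named lock on the instance UUID for do_terminate_instance, plus a nested "<uuid>-events" lock while _clear_events drops pending external events, each logged with the time waited and held. The sketch below is a simplified stand-in for that pattern using plain threading, not oslo.concurrency's implementation; named_lock and the owner strings are illustrative only.

# Simplified stand-in (not oslo.concurrency) for the named-lock pattern seen
# in the records above: acquire a lock keyed by name, report how long the
# caller waited for it and how long it was held, in the same shape as the
# lockutils "acquired"/"released" lines.
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)

@contextmanager
def named_lock(name: str, owner: str):
    lock = _locks[name]
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

# Usage mirroring the terminate flow logged above (UUID taken from the log):
instance_uuid = "71c99eda-d55d-4d60-92d2-a5553c3c3760"
with named_lock(instance_uuid, "do_terminate_instance"):
    with named_lock(f"{instance_uuid}-events", "_clear_events"):
        pass  # clear pending external events, then destroy the instance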
[ 2016.002719] env[68798]: DEBUG oslo_concurrency.lockutils [None req-1973c9c7-7e78-4b54-9797-2618fe437b73 tempest-ServerRescueTestJSONUnderV235-170076973 tempest-ServerRescueTestJSONUnderV235-170076973-project-member] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.191s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.003558] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 226.904s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.003748] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 71c99eda-d55d-4d60-92d2-a5553c3c3760] During sync_power_state the instance has a pending task (deleting). Skip. [ 2016.003924] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "71c99eda-d55d-4d60-92d2-a5553c3c3760" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.065961] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.047883] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.050338] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.051040] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2023.062923] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.063194] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.063382] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.063612] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2023.064758] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc958ad3-48b8-4803-9861-504a9693c933 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.074337] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6033bf3-0b79-462d-b9c2-9b390a3257ac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.088549] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31563c80-a6e8-47a5-ade2-50f7f741afc3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.095060] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e234026-7b5e-4a82-aab5-b784e38efdb5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.125229] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180758MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2023.125416] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.125571] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.192957] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.192957] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2023.194132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2023.303392] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04c2773-3599-4458-8872-4ce055fddc42 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.311727] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187c3a0c-6bad-4934-be73-e261a4df8020 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.341085] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6843892-1906-4bec-a550-781f2e80b2d9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.348703] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6053c99-e256-4369-985a-e5b721610619 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.362920] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2023.371870] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2023.386517] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2023.386724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.261s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.381022] env[68798]: DEBUG oslo_service.periodic_task [None 
req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2063.065085] env[68798]: WARNING oslo_vmware.rw_handles [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2063.065085] env[68798]: ERROR oslo_vmware.rw_handles [ 2063.065736] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2063.068427] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2063.068701] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Copying Virtual Disk [datastore1] vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/375e522f-2522-4058-83ea-f0f8f5c7f996/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2063.069028] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b67a2417-61b6-4d4e-b9da-700efb8ed28c {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.077830] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for the task: (returnval){ [ 2063.077830] env[68798]: value = "task-4217720" [ 2063.077830] env[68798]: _type = "Task" [ 2063.077830] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.086754] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Task: {'id': task-4217720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.588169] env[68798]: DEBUG oslo_vmware.exceptions [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2063.588410] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.589000] env[68798]: ERROR nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.589000] env[68798]: Faults: ['InvalidArgument'] [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Traceback (most recent call last): [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] yield resources [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self.driver.spawn(context, instance, image_meta, [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2063.589000] env[68798]: ERROR 
nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self._fetch_image_if_missing(context, vi) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] image_cache(vi, tmp_image_ds_loc) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] vm_util.copy_virtual_disk( [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] session._wait_for_task(vmdk_copy_task) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return self.wait_for_task(task_ref) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return evt.wait() [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] result = hub.switch() [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return self.greenlet.switch() [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self.f(*self.args, **self.kw) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] raise exceptions.translate_fault(task_info.error) [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Faults: ['InvalidArgument'] [ 2063.589000] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] [ 2063.589957] 
env[68798]: INFO nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Terminating instance [ 2063.591090] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.591305] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2063.591576] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-846bcb77-1b24-4e8d-993a-003e54fe81f4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.593924] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2063.594118] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2063.594865] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810d13e3-2fa3-4482-b707-0f8cddc592d6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.602164] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2063.602394] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-197dc4e7-0c01-4e3f-91f2-e6fb3093c0c4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.604724] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2063.604900] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2063.605887] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f0438c8-9a5b-4c84-bba6-e5dea433fff8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.610954] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for the task: (returnval){ [ 2063.610954] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]5266ba80-73e3-41c0-b843-3c9a94f3faa0" [ 2063.610954] env[68798]: _type = "Task" [ 2063.610954] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.619263] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]5266ba80-73e3-41c0-b843-3c9a94f3faa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.678646] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2063.678905] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2063.679110] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Deleting the datastore file [datastore1] a4e41ed1-2b39-4475-bd13-1680ff46ff6f {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2063.679432] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b13dc07a-87f1-42ce-8a68-253d708c8804 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.687139] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for the task: (returnval){ [ 2063.687139] env[68798]: value = "task-4217722" [ 2063.687139] env[68798]: _type = "Task" [ 2063.687139] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.695379] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Task: {'id': task-4217722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.121635] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2064.122102] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Creating directory with path [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2064.122102] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34790ecc-5a1c-482e-8ea4-d67fdac67e05 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.135946] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Created directory with path [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2064.136183] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Fetch image to [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2064.136359] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2064.137204] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a19e3c-f49b-439b-96c4-9abbf1179de3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.145270] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379ec8ba-d2f0-4da9-bf4c-e42255fd1db0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.155863] env[68798]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b12866-0ec7-48ab-b6dc-8e44d56a5c3f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.186963] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e2f290-9817-428d-a4e7-324557a4a49a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.199324] env[68798]: DEBUG oslo_vmware.api [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Task: {'id': task-4217722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101884} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.199900] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2064.200110] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2064.200293] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2064.200469] env[68798]: INFO nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2064.202079] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c9aa8456-3620-46b4-b527-84d9ceca05fe {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.204178] env[68798]: DEBUG nova.compute.claims [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2064.204359] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.204571] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.230045] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2064.289548] env[68798]: DEBUG oslo_vmware.rw_handles [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2064.347683] env[68798]: DEBUG oslo_vmware.rw_handles [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2064.347885] env[68798]: DEBUG oslo_vmware.rw_handles [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2064.416398] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dc2234-fb8b-4080-98be-61910748a448 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.424950] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c574a64-842a-426a-a3cb-5b4950c00238 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.457161] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dca4f5f-c07a-4331-a08d-6fff2d520cf7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.465326] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73556cff-d990-4e80-9929-6cbce7d55165 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.284935] env[68798]: DEBUG nova.compute.provider_tree [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2065.293921] env[68798]: DEBUG nova.scheduler.client.report [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2065.307559] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.103s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.308105] env[68798]: ERROR nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2065.308105] env[68798]: Faults: ['InvalidArgument'] [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Traceback (most recent call last): [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/compute/manager.py", line 
2633, in _build_and_run_instance [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self.driver.spawn(context, instance, image_meta, [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self._fetch_image_if_missing(context, vi) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] image_cache(vi, tmp_image_ds_loc) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] vm_util.copy_virtual_disk( [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] session._wait_for_task(vmdk_copy_task) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return self.wait_for_task(task_ref) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return evt.wait() [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] result = hub.switch() [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] return self.greenlet.switch() [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] self.f(*self.args, **self.kw) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: 
a4e41ed1-2b39-4475-bd13-1680ff46ff6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] raise exceptions.translate_fault(task_info.error) [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Faults: ['InvalidArgument'] [ 2065.308105] env[68798]: ERROR nova.compute.manager [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] [ 2065.308882] env[68798]: DEBUG nova.compute.utils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2065.310294] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Build of instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f was re-scheduled: A specified parameter was not correct: fileType [ 2065.310294] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2065.310661] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2065.310833] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2065.311008] env[68798]: DEBUG nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2065.311178] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2065.774671] env[68798]: DEBUG nova.network.neutron [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.787167] env[68798]: INFO nova.compute.manager [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Took 0.48 seconds to deallocate network for instance. [ 2065.892027] env[68798]: INFO nova.scheduler.client.report [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Deleted allocations for instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f [ 2065.923694] env[68798]: DEBUG oslo_concurrency.lockutils [None req-bd0ff178-4cba-4e27-a113-ec4e9489c436 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 613.934s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.924066] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 418.199s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.924313] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Acquiring lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.924526] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] 
Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.924732] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.927543] env[68798]: INFO nova.compute.manager [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Terminating instance [ 2065.930087] env[68798]: DEBUG nova.compute.manager [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2065.930087] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2065.930347] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fa0042d-0bee-4b93-aef3-42aaa47381a5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.940237] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc50ccc-a74c-4b2f-b267-f9dcef2fe8e2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.969921] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a4e41ed1-2b39-4475-bd13-1680ff46ff6f could not be found. [ 2065.970154] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2065.970341] env[68798]: INFO nova.compute.manager [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2065.970607] env[68798]: DEBUG oslo.service.loopingcall [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2065.970887] env[68798]: DEBUG nova.compute.manager [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2065.970989] env[68798]: DEBUG nova.network.neutron [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2065.997632] env[68798]: DEBUG nova.network.neutron [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.006844] env[68798]: INFO nova.compute.manager [-] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] Took 0.04 seconds to deallocate network for instance. [ 2066.108469] env[68798]: DEBUG oslo_concurrency.lockutils [None req-844d6dc2-b58e-40ae-b7cb-c4e4d8cd4f32 tempest-ServerAddressesNegativeTestJSON-701747525 tempest-ServerAddressesNegativeTestJSON-701747525-project-member] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2066.109656] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 277.010s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.109859] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: a4e41ed1-2b39-4475-bd13-1680ff46ff6f] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2066.110059] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "a4e41ed1-2b39-4475-bd13-1680ff46ff6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.048852] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2074.048737] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.048327] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.048514] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2077.044750] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.048381] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2077.048541] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2077.048661] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2077.068456] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.068630] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.068742] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.068872] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.069010] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.069319] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.069455] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2077.069628] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2077.070114] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.048857] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.049264] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.049368] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 2078.058919] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2081.058876] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.048816] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.061752] 
env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.062062] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.062240] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.062399] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2084.063575] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5567f2fd-a8e3-489c-a094-0ced7b91ff2f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.072517] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05712fec-454d-474b-a99a-b5c1640d841d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.086925] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3937a52-04d8-4e90-83b9-c3da6ec6cbe8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.093869] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb54a413-427a-4f02-81ad-cc41e9d8683d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.123084] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180760MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2084.123238] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.123427] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.272379] env[68798]: DEBUG 
nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.272693] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.272946] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.273132] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.273358] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.273691] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.273931] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2084.274305] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2084.274528] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2084.291252] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2084.306952] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2084.307161] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2084.319449] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2084.337867] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2084.434093] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13265457-da09-4cb5-b51d-4759254baf3c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.442144] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f3dc7062-08b7-41a6-b5b7-201bd185b98d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.472132] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feaf7893-055a-4ae8-a1a7-f0261a475abf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.480464] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3bb826-0011-4501-9eb3-68032d48cf9f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.494513] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2084.504036] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2084.519676] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2084.519888] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.396s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.049664] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.050186] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 2090.060159] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.117725] env[68798]: WARNING oslo_vmware.rw_handles [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2112.117725] 
env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2112.117725] env[68798]: ERROR oslo_vmware.rw_handles [ 2112.118402] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2112.120695] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2112.120970] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Copying Virtual Disk [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/689f0452-87bd-4dc3-af82-de227da0a157/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2112.122662] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09ffa78b-d5f9-41f1-a695-d8b835ba2bd5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.131392] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for the task: (returnval){ [ 2112.131392] env[68798]: value = "task-4217723" [ 2112.131392] env[68798]: _type = "Task" [ 2112.131392] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.140082] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Task: {'id': task-4217723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.642288] env[68798]: DEBUG oslo_vmware.exceptions [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2112.642551] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.643127] env[68798]: ERROR nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.643127] env[68798]: Faults: ['InvalidArgument'] [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Traceback (most recent call last): [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] yield resources [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self.driver.spawn(context, instance, image_meta, [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self._fetch_image_if_missing(context, vi) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] image_cache(vi, tmp_image_ds_loc) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: 
ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] vm_util.copy_virtual_disk( [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] session._wait_for_task(vmdk_copy_task) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return self.wait_for_task(task_ref) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return evt.wait() [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] result = hub.switch() [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return self.greenlet.switch() [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self.f(*self.args, **self.kw) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] raise exceptions.translate_fault(task_info.error) [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Faults: ['InvalidArgument'] [ 2112.643127] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] [ 2112.644142] env[68798]: INFO nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Terminating instance [ 2112.645046] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.645276] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2112.645527] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0559173-8864-4df4-8aa2-c0b27f1530bb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.647913] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2112.648123] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2112.648851] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d794e234-545b-4b08-b638-82dfd4302f71 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.655740] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2112.655970] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6c2f1a6-ab29-48b5-9518-94af4eca3368 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.658188] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2112.658364] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2112.659314] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db101a67-bc24-42fc-8e12-0af5a543df27 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.664368] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 2112.664368] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]522147bc-d1ee-95f2-5393-42ac25287bf0" [ 2112.664368] env[68798]: _type = "Task" [ 2112.664368] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.679162] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2112.679434] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating directory with path [datastore1] vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2112.679674] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-292d2d54-d59d-4506-aacb-db242dc3981b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.702280] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Created directory with path [datastore1] vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2112.702529] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Fetch image to [datastore1] vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2112.702709] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2112.703584] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77ef754-27a3-40a1-baf7-b66fa4735f1f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2112.711441] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2109be4-ffd5-4179-b98b-e9461525287f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.722090] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9289bb5c-0469-49b5-b38a-183a89c5dc54 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.754854] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115be093-1822-4073-90b6-2e60756a7f7a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.757333] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2112.757539] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2112.757719] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Deleting the datastore file [datastore1] ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2112.757996] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-679afbfb-9e3d-4dc8-915f-9fa7ba4e27df {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.763377] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9b3c326c-3552-41ea-a97f-67c2221f7b09 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.766292] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for the task: (returnval){ [ 2112.766292] env[68798]: value = "task-4217725" [ 2112.766292] env[68798]: _type = "Task" [ 2112.766292] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2112.774221] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Task: {'id': task-4217725, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.787808] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2112.932716] env[68798]: DEBUG oslo_vmware.rw_handles [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2112.992699] env[68798]: DEBUG oslo_vmware.rw_handles [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2112.992933] env[68798]: DEBUG oslo_vmware.rw_handles [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2113.278210] env[68798]: DEBUG oslo_vmware.api [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Task: {'id': task-4217725, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071639} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.278561] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2113.278605] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2113.278753] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2113.278920] env[68798]: INFO nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Took 0.63 seconds to destroy the instance on the hypervisor. [ 2113.280939] env[68798]: DEBUG nova.compute.claims [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2113.281130] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.281359] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.424599] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc1c3c0-647f-42d3-aab0-f7666aa9033f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.432117] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca4dfe3-c4ce-44a5-881d-8a30f62b707e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.462946] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b89dad1-b1fa-4d37-b97b-312cdaa22df7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.470739] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bf1dd1-631d-4d80-b6f1-a3ba8a7cee2d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.484460] env[68798]: DEBUG nova.compute.provider_tree [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.493184] env[68798]: DEBUG nova.scheduler.client.report [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2113.508920] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.509460] env[68798]: ERROR nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2113.509460] env[68798]: Faults: ['InvalidArgument'] [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Traceback (most recent call last): [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self.driver.spawn(context, instance, image_meta, [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self._fetch_image_if_missing(context, vi) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] image_cache(vi, tmp_image_ds_loc) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] vm_util.copy_virtual_disk( [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] session._wait_for_task(vmdk_copy_task) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return self.wait_for_task(task_ref) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return evt.wait() [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] result = hub.switch() [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] return self.greenlet.switch() [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] self.f(*self.args, **self.kw) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] raise exceptions.translate_fault(task_info.error) [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Faults: ['InvalidArgument'] [ 2113.509460] env[68798]: ERROR nova.compute.manager [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] [ 2113.510291] env[68798]: DEBUG nova.compute.utils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] VimFaultException {{(pid=68798) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 2113.511976] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Build of instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 was re-scheduled: A specified parameter was not correct: fileType [ 2113.511976] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2113.512377] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2113.512553] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2113.512726] env[68798]: DEBUG nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2113.512906] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2113.950657] env[68798]: DEBUG nova.network.neutron [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.963289] env[68798]: INFO nova.compute.manager [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Took 0.45 seconds to deallocate network for instance. 
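[editor's note] The traceback above repeats a single failure arc: vm_util.copy_virtual_disk starts a VirtualDiskManager.CopyVirtualDisk_Task, oslo_vmware's wait_for_task polls it, and the task's InvalidArgument fault is translated into a VimFaultException ("A specified parameter was not correct: fileType"), after which the compute manager aborts the resource claim and reschedules the build. The following is a minimal illustrative Python sketch of that call-and-poll pattern as seen from a caller of oslo.vmware; it is not the Nova code quoted in the traceback. Assumptions: `session` is an already-created oslo_vmware.api.VMwareAPISession, `disk_manager` is the VirtualDiskManager managed-object reference, and the keyword arguments follow the vSphere CopyVirtualDisk_Task operation.

from oslo_vmware import exceptions as vexc

def copy_sparse_disk(session, disk_manager, source_path, dest_path, dest_spec=None):
    # Start the asynchronous vCenter task; this corresponds to the
    # "Invoking VirtualDiskManager.CopyVirtualDisk_Task" entries in the log.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              disk_manager,
                              sourceName=source_path,
                              destName=dest_path,
                              destSpec=dest_spec)
    try:
        # wait_for_task() polls the task (the "progress is 0%" lines) and
        # raises a translated exception once task_info.error is set.
        return session.wait_for_task(task)
    except vexc.VimFaultException:
        # The failure recorded above: "A specified parameter was not correct:
        # fileType", Faults: ['InvalidArgument'].  The caller (here, Nova)
        # then tears down the half-built VM, aborts the claim and reschedules.
        raise

[end editor's note]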
[ 2114.063323] env[68798]: INFO nova.scheduler.client.report [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Deleted allocations for instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 [ 2114.086577] env[68798]: DEBUG oslo_concurrency.lockutils [None req-3732efb4-fa87-4875-8a9b-f1154ee858c3 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.401s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.086835] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.942s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.087146] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Acquiring lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.087451] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.087651] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.090058] env[68798]: INFO nova.compute.manager [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Terminating instance [ 2114.093670] env[68798]: DEBUG nova.compute.manager [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2114.093868] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2114.094150] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc5cfc85-45ef-4271-b5ca-28dd5c9fd9a7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.102780] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3fdf2f-f502-4d5b-b6e5-da260c09be6d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.130593] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ab6c3a4b-2208-49c8-b92f-1f08c0b225f3 could not be found. [ 2114.130806] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2114.130985] env[68798]: INFO nova.compute.manager [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2114.131262] env[68798]: DEBUG oslo.service.loopingcall [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2114.131488] env[68798]: DEBUG nova.compute.manager [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2114.131584] env[68798]: DEBUG nova.network.neutron [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2114.167022] env[68798]: DEBUG nova.network.neutron [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.177183] env[68798]: INFO nova.compute.manager [-] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] Took 0.05 seconds to deallocate network for instance. 
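[editor's note] The WARNING above ("Instance does not exist on backend: nova.exception.InstanceNotFound") shows the second terminate of the same instance succeeding even though the VM was already removed from the hypervisor during the rescheduling cleanup: the missing backend VM is treated as already destroyed and the rest of the teardown (network deallocation, allocation cleanup) still runs. Below is a minimal illustrative sketch of that tolerance pattern, not Nova's implementation; `find_vm_by_uuid`, `unregister_and_delete`, and `deallocate_network` are hypothetical callables standing in for the virt driver and network layers.

class InstanceNotFound(Exception):
    """Raised when the backend has no VM for the given instance UUID."""

def destroy(instance_uuid, find_vm_by_uuid, unregister_and_delete, deallocate_network):
    try:
        vm_ref = find_vm_by_uuid(instance_uuid)
        unregister_and_delete(vm_ref)
    except InstanceNotFound:
        # Mirrors the WARNING above: the VM is already gone, so the hypervisor
        # destroy is a no-op, but it must not abort the overall teardown.
        pass
    # Network cleanup happens regardless of whether the VM still existed,
    # matching the "Deallocating network for instance" entries that follow.
    deallocate_network(instance_uuid)

[end editor's note]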
[ 2114.267482] env[68798]: DEBUG oslo_concurrency.lockutils [None req-4afe080c-db82-4e24-bcd4-f5e46579ec80 tempest-ServersNegativeTestJSON-125149055 tempest-ServersNegativeTestJSON-125149055-project-member] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.268511] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 325.169s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.268718] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ab6c3a4b-2208-49c8-b92f-1f08c0b225f3] During sync_power_state the instance has a pending task (deleting). Skip. [ 2114.268899] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "ab6c3a4b-2208-49c8-b92f-1f08c0b225f3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.753546] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "538aeb6d-0aca-4d72-af14-859f4397514b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.065620] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.048431] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.048376] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.048735] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2138.048681] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.049046] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2138.049046] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2138.065458] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065458] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065458] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065649] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065649] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065782] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2138.065903] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2138.066595] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2138.066789] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.062422] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.049946] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2145.044113] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.049407] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2146.062309] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.062564] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.062721] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.062874] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2146.064087] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c4e37b-6e6d-40df-ab19-f7bbb7429398 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.073240] env[68798]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fbbf28-5b01-4e57-84e6-bf07b4a4fda3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.088093] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33066650-def9-4e5e-ad1d-32fa18becbb8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.094752] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1965a02-d07f-44f8-b1d9-f4105a388a9a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.123838] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180760MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2146.124058] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.124234] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.188052] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188196] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188238] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188341] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188461] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188581] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2146.188795] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2146.189048] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2146.271461] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa26ea88-b7d8-4183-a995-2dd60a6f3bf2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.280051] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac689a0-07db-4518-84a7-5e993f8972eb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.311537] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf781d53-af5f-41a9-b74f-26c5f3ebe145 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.319265] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaf45b8-31f7-4998-99fb-cb6aa5528f64 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.332178] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2146.340395] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2146.355630] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2146.355827] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.232s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2160.877781] env[68798]: WARNING oslo_vmware.rw_handles [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2160.877781] env[68798]: ERROR oslo_vmware.rw_handles [ 2160.878612] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2160.880270] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2160.880515] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Copying Virtual Disk [datastore1] vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] 
vmware_temp/edc807b0-1374-467e-bf82-c834eda34e51/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2160.880801] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f082b89-4c22-4711-a938-8acb0ef2062c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.888507] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 2160.888507] env[68798]: value = "task-4217726" [ 2160.888507] env[68798]: _type = "Task" [ 2160.888507] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.897327] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.399561] env[68798]: DEBUG oslo_vmware.exceptions [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2161.399855] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2161.400454] env[68798]: ERROR nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2161.400454] env[68798]: Faults: ['InvalidArgument'] [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Traceback (most recent call last): [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] yield resources [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self.driver.spawn(context, instance, image_meta, [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self._vmops.spawn(context, instance, 
image_meta, injected_files, [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self._fetch_image_if_missing(context, vi) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] image_cache(vi, tmp_image_ds_loc) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] vm_util.copy_virtual_disk( [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] session._wait_for_task(vmdk_copy_task) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return self.wait_for_task(task_ref) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return evt.wait() [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] result = hub.switch() [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return self.greenlet.switch() [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self.f(*self.args, **self.kw) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] raise exceptions.translate_fault(task_info.error) [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2161.400454] 
env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Faults: ['InvalidArgument'] [ 2161.400454] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] [ 2161.401465] env[68798]: INFO nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Terminating instance [ 2161.402406] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2161.402614] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.402859] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39347391-b64e-4491-87ba-4fbedd2aba4a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.407764] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2161.407764] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2161.408119] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5988f881-6243-40b8-b150-f90d231d9208 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.412305] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.412481] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2161.415066] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-310db3c3-2ab6-4066-a912-c73c9b18c900 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.417243] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2161.417703] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e17a4b56-9064-459e-be92-4fee0de189f6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.422258] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2161.422258] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]527735f8-cb2c-4624-3056-24d6ed5d2c2e" [ 2161.422258] env[68798]: _type = "Task" [ 2161.422258] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.429807] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]527735f8-cb2c-4624-3056-24d6ed5d2c2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.490777] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2161.491063] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2161.491266] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleting the datastore file [datastore1] 57a34323-ebdd-4495-ab62-f7b82ab804d9 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2161.491548] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c990453-bfb3-4223-aa6a-5b53d99b0f23 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.498669] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for the task: (returnval){ [ 2161.498669] env[68798]: value = "task-4217728" [ 2161.498669] env[68798]: _type = "Task" [ 2161.498669] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2161.507151] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.935433] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2161.935739] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2161.935953] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a495653a-2360-4563-bcdd-5c579754e85a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.949858] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2161.950215] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Fetch image to [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2161.950502] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2161.951701] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ef754f-3629-4a7c-8043-7d468805f726 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.960825] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a427cbc-d79b-4b37-87b5-d921fc369af1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.971392] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef6e6fc-95de-42f2-91cf-1c0a0ee52983 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.008307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93f3e43-2a21-4871-8bf9-98aa361efdb4 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.017560] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-58bda2cc-32ec-4102-9e55-8550b8d62d67 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.019349] env[68798]: DEBUG oslo_vmware.api [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Task: {'id': task-4217728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078174} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2162.019602] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2162.019787] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2162.019966] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2162.020188] env[68798]: INFO nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Took 0.61 seconds to destroy the instance on the hypervisor. 
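The DeleteDatastoreFile_Task and CopyVirtualDisk_Task entries around this point all follow the same shape: a vCenter task is started, then its task info is polled until it reaches a terminal state ("progress is 0%." ... "completed successfully"). The sketch below only illustrates that polling loop under assumed names; get_task_info, TaskInfo and the state strings are stand-ins for the real property-collector lookups, not the oslo.vmware implementation.

    import time

    class TaskInfo:
        # Hypothetical shape of the data the poller reads back; in the log it
        # comes from PropertyCollector.RetrievePropertiesEx calls on the task.
        def __init__(self, state, progress=0, error=None):
            self.state = state        # "running", "success" or "error"
            self.progress = progress  # the "progress is N%" value
            self.error = error        # fault payload when state == "error"

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter-style task until it succeeds or fails."""
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # oslo.vmware translates the task fault at this point; when
                # the fault name is not recognised ("Fault InvalidArgument
                # not matched") it falls back to a generic VimFaultException.
                raise RuntimeError("task failed: %s" % info.error)
            time.sleep(poll_interval)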
[ 2162.022373] env[68798]: DEBUG nova.compute.claims [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2162.022563] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.022776] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.053137] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2162.109433] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2162.169251] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2162.169453] env[68798]: DEBUG oslo_vmware.rw_handles [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2162.200976] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b336bef5-ec77-4406-829d-75b2b025af4f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.208689] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d4637b-9db4-4279-a140-c5190de93d7a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.240284] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12538d4-bde5-45e3-9cef-17b3d14d0640 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.248196] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c98579f-5056-42c2-a021-d93873a35b16 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.261618] env[68798]: DEBUG nova.compute.provider_tree [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.272612] env[68798]: DEBUG nova.scheduler.client.report [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2162.286062] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.263s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.286676] env[68798]: ERROR nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2162.286676] env[68798]: Faults: ['InvalidArgument'] [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Traceback (most recent call last): [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] 
self.driver.spawn(context, instance, image_meta, [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self._fetch_image_if_missing(context, vi) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] image_cache(vi, tmp_image_ds_loc) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] vm_util.copy_virtual_disk( [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] session._wait_for_task(vmdk_copy_task) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return self.wait_for_task(task_ref) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return evt.wait() [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] result = hub.switch() [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] return self.greenlet.switch() [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] self.f(*self.args, **self.kw) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] raise exceptions.translate_fault(task_info.error) [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Faults: ['InvalidArgument'] [ 2162.286676] env[68798]: ERROR nova.compute.manager [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] [ 2162.287548] env[68798]: DEBUG nova.compute.utils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2162.288954] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Build of instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 was re-scheduled: A specified parameter was not correct: fileType [ 2162.288954] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2162.289352] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2162.289531] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2162.289706] env[68798]: DEBUG nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2162.289872] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2162.648692] env[68798]: DEBUG nova.network.neutron [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.671243] env[68798]: INFO nova.compute.manager [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Took 0.38 seconds to deallocate network for instance. 
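Every resource-tracker operation in this log (abort_instance_claim, _update_available_resource, clean_compute_node_cache) runs under the same "compute_resources" lock, which is why each one is bracketed by "acquired ... waited N.NNNs" and ""released" ... held N.NNNs" lockutils lines. A minimal sketch of that serialization with the stock oslo.concurrency helper follows; the class and method body are illustrative placeholders, not the real ResourceTracker code.

    from oslo_concurrency import lockutils

    class ResourceTrackerSketch:
        """Placeholder tracker used only to show the locking pattern."""

        def __init__(self):
            self.claims = {}

        @lockutils.synchronized("compute_resources")
        def abort_instance_claim(self, instance_uuid):
            # While this method holds the "compute_resources" lock, any other
            # @synchronized("compute_resources") method blocks, producing the
            # waited/held timings seen in the lockutils DEBUG entries.
            self.claims.pop(instance_uuid, None)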
[ 2162.781020] env[68798]: INFO nova.scheduler.client.report [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Deleted allocations for instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 [ 2162.804854] env[68798]: DEBUG oslo_concurrency.lockutils [None req-020aea22-12fc-4802-804f-844f3134acbd tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 625.037s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.805106] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.834s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.805361] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Acquiring lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.805591] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.805810] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.808491] env[68798]: INFO nova.compute.manager [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Terminating instance [ 2162.809547] env[68798]: DEBUG nova.compute.manager [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2162.809738] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2162.810344] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-146f0fed-810e-47e8-b6e8-c0f4ebdd0755 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.819916] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe932be-03c7-464a-b07b-ec72b4f70c36 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.846613] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57a34323-ebdd-4495-ab62-f7b82ab804d9 could not be found. [ 2162.846827] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2162.847025] env[68798]: INFO nova.compute.manager [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2162.847538] env[68798]: DEBUG oslo.service.loopingcall [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2162.847779] env[68798]: DEBUG nova.compute.manager [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2162.847883] env[68798]: DEBUG nova.network.neutron [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2162.872298] env[68798]: DEBUG nova.network.neutron [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2162.880464] env[68798]: INFO nova.compute.manager [-] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] Took 0.03 seconds to deallocate network for instance. 
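The WARNING just above ("Instance does not exist on backend: nova.exception.InstanceNotFound") shows the destroy path tolerating a VM that has already been removed: the missing backend VM is only logged, the instance is still marked destroyed, and network cleanup continues. A rough sketch of that behaviour, with find_vm_ref and unregister_vm as hypothetical stand-ins for the SearchIndex.FindAllByUuid and UnregisterVM calls in the log:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_on_backend(find_vm_ref, unregister_vm, instance_uuid):
        """Best-effort destroy that treats a missing VM as already gone."""
        try:
            vm_ref = find_vm_ref(instance_uuid)
            if vm_ref is None:
                raise InstanceNotFound(instance_uuid)
            unregister_vm(vm_ref)
        except InstanceNotFound:
            # Mirrors the WARNING above: nothing to unregister, so the
            # caller proceeds straight to network deallocation.
            pass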
[ 2162.976510] env[68798]: DEBUG oslo_concurrency.lockutils [None req-ae07cce4-3d91-4f6a-9966-a0934f520ac6 tempest-ImagesTestJSON-406590470 tempest-ImagesTestJSON-406590470-project-member] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.978021] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 373.878s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.978021] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 57a34323-ebdd-4495-ab62-f7b82ab804d9] During sync_power_state the instance has a pending task (deleting). Skip. [ 2162.978021] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "57a34323-ebdd-4495-ab62-f7b82ab804d9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.356184] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.048768] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.047944] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.048178] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2198.048300] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2198.066100] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2198.066503] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2198.066503] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2198.066578] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2198.066635] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2198.066757] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2198.067300] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2198.067460] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2199.048803] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2200.050167] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.044707] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.049078] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.348585] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "5a65c46e-989d-4a8f-9387-86cde7725173" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.048452] env[68798]: DEBUG oslo_service.periodic_task [None 
req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2208.061695] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.062236] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.062236] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.062457] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2208.063542] env[68798]: WARNING oslo_vmware.rw_handles [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2208.063542] env[68798]: ERROR oslo_vmware.rw_handles [ 2208.063967] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2208.065787] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2208.066098] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Copying Virtual Disk [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/a77537ad-f0ae-4a69-aa78-1e615a285e17/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2208.067614] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d564666-8a36-45e3-ad68-37f7a756cfd1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.070604] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b86ccae6-1a33-4976-b897-5c45729a0b51 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.079732] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f888313-dbac-41dd-8fb5-aa7dfb968b5f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.083578] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2208.083578] env[68798]: value = "task-4217729" [ 2208.083578] env[68798]: _type = "Task" [ 2208.083578] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.095815] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a74c01d-8ffb-4bf4-9043-e723f778d635 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.100901] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217729, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.105329] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dac2ee8-87b5-4f76-8a40-fc4b67490d72 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.134371] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180768MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2208.134522] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.134705] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.193732] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.193907] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance ce408b93-3713-4819-8c80-63735d9a5467 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.194053] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.194189] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.194313] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.194509] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2208.194641] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2208.263437] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c1312f4-69b3-44c1-ac1c-74ba81b63ba8 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.270886] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24f6f49-e200-492c-bdb3-3136b1efa499 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.300644] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4835e4-fee7-4ec1-a8b5-db8f4da0690a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.308020] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feee4979-200e-4e9e-ae06-875012f33cbc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.322280] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2208.330235] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2208.343495] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2208.343682] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.209s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.594659] env[68798]: DEBUG oslo_vmware.exceptions [None 
req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2208.594967] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2208.595649] env[68798]: ERROR nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2208.595649] env[68798]: Faults: ['InvalidArgument'] [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Traceback (most recent call last): [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] yield resources [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self.driver.spawn(context, instance, image_meta, [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self._fetch_image_if_missing(context, vi) [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] image_cache(vi, tmp_image_ds_loc) [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] vm_util.copy_virtual_disk( [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] session._wait_for_task(vmdk_copy_task) [ 2208.595649] 
env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return self.wait_for_task(task_ref) [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return evt.wait() [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] result = hub.switch() [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return self.greenlet.switch() [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self.f(*self.args, **self.kw) [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] raise exceptions.translate_fault(task_info.error) [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Faults: ['InvalidArgument'] [ 2208.595649] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] [ 2208.596548] env[68798]: INFO nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Terminating instance [ 2208.597616] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.597839] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2208.598082] env[68798]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-ce5759bb-a64f-4269-9312-715c76a5b74c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.600530] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2208.600806] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2208.601475] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ae8f72-01a8-4841-853a-523f55e4859d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.608997] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2208.609246] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6415ccf3-aaa6-4741-842d-2e69220975d3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.611626] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2208.611800] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2208.612778] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ea586e-6514-40ba-84f4-3632b72c00fb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.618041] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for the task: (returnval){ [ 2208.618041] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52e1a61f-368f-f120-71ad-43c505c08e30" [ 2208.618041] env[68798]: _type = "Task" [ 2208.618041] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.625090] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52e1a61f-368f-f120-71ad-43c505c08e30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.684489] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2208.684716] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2208.684897] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleting the datastore file [datastore1] 6f0e769a-33db-48c6-9a88-cceb310cb819 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2208.685202] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41420718-04ba-43ae-8033-94de868c937b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.692491] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2208.692491] env[68798]: value = "task-4217731" [ 2208.692491] env[68798]: _type = "Task" [ 2208.692491] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.700980] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217731, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.127566] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2209.127861] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Creating directory with path [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2209.128082] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba0b632d-d9fa-42ce-a7eb-9d2cb0b6c4db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.139567] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Created directory with path [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2209.139757] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Fetch image to [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2209.139930] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2209.140687] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f88a3f-a486-45c6-9efa-681ed5168f3c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.147505] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155df1e5-fcaf-4196-8f7b-ee6d63df31ba {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.156479] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a8ec91-3fcb-49c2-9829-90efc07f6e50 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.187925] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73246776-6810-49ef-891b-8f4abab1839b {{(pid=68798) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.196939] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e13c7dc7-e0d2-498d-bdd8-886e32a27a36 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.203368] env[68798]: DEBUG oslo_vmware.api [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083417} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.203635] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2209.203820] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2209.203991] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2209.204205] env[68798]: INFO nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Took 0.60 seconds to destroy the instance on the hypervisor. 
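The traceback above shows how the "A specified parameter was not correct: fileType" fault reaches the compute manager: _cache_sparse_image calls vm_util.copy_virtual_disk, which waits on the CopyVirtualDisk_Task through the oslo.vmware session, and the task error is translated into a generic VimFaultException. A minimal sketch of that wait, assuming an already-authenticated oslo_vmware.api.VMwareAPISession; the helper name is illustrative, not Nova code:

from oslo_vmware import exceptions as vexc

def wait_for_copy(session, vmdk_copy_task):
    # Block until vCenter reports the CopyVirtualDisk task as done; on failure
    # oslo.vmware translates the task error into an exception class.
    try:
        return session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as exc:
        # exc.fault_list carries the raw fault names (['InvalidArgument'] in the
        # log above); because no more specific class matches ("Fault
        # InvalidArgument not matched."), the generic VimFaultException is
        # raised, the spawn path lets it propagate, and the compute manager
        # terminates the half-built instance as logged above.
        raise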
[ 2209.206462] env[68798]: DEBUG nova.compute.claims [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2209.206706] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.207009] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.222253] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2209.323127] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e812198-7d40-4ebc-bdb9-c80772f510fc {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.331131] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c38d527-b89b-4a81-a841-a88f44f9826c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.363717] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086a44a4-3484-457e-b1ea-6581d9f998c5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.371607] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb9b3df-e393-430b-b960-32b73297afe0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.389090] env[68798]: DEBUG nova.compute.provider_tree [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2209.398365] env[68798]: DEBUG nova.scheduler.client.report [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2209.405212] env[68798]: DEBUG oslo_vmware.rw_handles [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2209.460224] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.460818] env[68798]: ERROR nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2209.460818] env[68798]: Faults: ['InvalidArgument'] [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Traceback (most recent call last): [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self.driver.spawn(context, instance, image_meta, [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self._fetch_image_if_missing(context, vi) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] image_cache(vi, tmp_image_ds_loc) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] vm_util.copy_virtual_disk( [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] session._wait_for_task(vmdk_copy_task) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return self.wait_for_task(task_ref) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return evt.wait() [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] result = hub.switch() [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] return self.greenlet.switch() [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] self.f(*self.args, **self.kw) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] raise exceptions.translate_fault(task_info.error) [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Faults: ['InvalidArgument'] [ 2209.460818] env[68798]: ERROR nova.compute.manager [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] [ 2209.461738] env[68798]: DEBUG nova.compute.utils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2209.464043] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Build of instance 6f0e769a-33db-48c6-9a88-cceb310cb819 was re-scheduled: A specified parameter was not correct: fileType [ 2209.464043] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2209.464433] env[68798]: DEBUG 
nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2209.464619] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2209.464795] env[68798]: DEBUG nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2209.464977] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2209.467107] env[68798]: DEBUG oslo_vmware.rw_handles [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2209.467279] env[68798]: DEBUG oslo_vmware.rw_handles [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2209.855986] env[68798]: DEBUG nova.network.neutron [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2209.866362] env[68798]: INFO nova.compute.manager [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Took 0.40 seconds to deallocate network for instance. 
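The "compute_resources" lock lines above (Acquiring / acquired / released, with wait and held times) come from oslo.concurrency's lockutils wrapping ResourceTracker.abort_instance_claim. A minimal sketch of that locking pattern, with an illustrative function body and a placeholder lock name rather than Nova's actual resource-tracker code:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_claim_sketch():
    # Body runs while holding the in-process "compute_resources" lock; the
    # synchronized wrapper emits the DEBUG "acquired ... waited" and
    # "released ... held" timing lines seen above.
    pass

# The per-image datastore cache lock earlier in the log is taken with the
# explicit context-manager form, which logs the "Acquired/Releasing lock" lines:
with lockutils.lock('[datastore1] devstack-image-cache_base/<image>.vmdk'):
    pass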
[ 2209.981629] env[68798]: INFO nova.scheduler.client.report [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted allocations for instance 6f0e769a-33db-48c6-9a88-cceb310cb819 [ 2210.003499] env[68798]: DEBUG oslo_concurrency.lockutils [None req-6cae5080-3bc7-44e5-8b5e-378605e66717 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 590.738s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.003763] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 420.904s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.003955] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] During sync_power_state the instance has a pending task (spawning). Skip. [ 2210.004151] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.004652] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 395.154s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.004882] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.005101] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.005269] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.007139] env[68798]: INFO nova.compute.manager [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Terminating instance [ 2210.009186] env[68798]: DEBUG nova.compute.manager [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2210.009186] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2210.009335] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea0a6481-dcd7-47b3-a0ec-40c4bd2dd877 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.018416] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14a8ac3-e195-45e4-b740-c93e30b206f3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.044704] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f0e769a-33db-48c6-9a88-cceb310cb819 could not be found. [ 2210.044933] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2210.045126] env[68798]: INFO nova.compute.manager [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2210.045374] env[68798]: DEBUG oslo.service.loopingcall [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2210.045873] env[68798]: DEBUG nova.compute.manager [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2210.045974] env[68798]: DEBUG nova.network.neutron [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2210.069656] env[68798]: DEBUG nova.network.neutron [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.077885] env[68798]: INFO nova.compute.manager [-] [instance: 6f0e769a-33db-48c6-9a88-cceb310cb819] Took 0.03 seconds to deallocate network for instance. [ 2210.168605] env[68798]: DEBUG oslo_concurrency.lockutils [None req-f21e1f00-36e8-4683-9860-a74d02c4298f tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "6f0e769a-33db-48c6-9a88-cceb310cb819" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.164s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2253.343679] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.048449] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.169487] env[68798]: WARNING oslo_vmware.rw_handles [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2257.169487] env[68798]: ERROR oslo_vmware.rw_handles [ 2257.169938] env[68798]: DEBUG 
nova.virt.vmwareapi.images [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2257.171782] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2257.172104] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Copying Virtual Disk [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/a7eeecf2-8a39-4243-a9db-1f6808deb829/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2257.172417] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a77a52d5-29f9-4234-96a5-31452d23c25f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.180249] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for the task: (returnval){ [ 2257.180249] env[68798]: value = "task-4217732" [ 2257.180249] env[68798]: _type = "Task" [ 2257.180249] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.188531] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Task: {'id': task-4217732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.690866] env[68798]: DEBUG oslo_vmware.exceptions [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Fault InvalidArgument not matched. 
{{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2257.691166] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2257.691746] env[68798]: ERROR nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2257.691746] env[68798]: Faults: ['InvalidArgument'] [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] Traceback (most recent call last): [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] yield resources [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self.driver.spawn(context, instance, image_meta, [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self._fetch_image_if_missing(context, vi) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] image_cache(vi, tmp_image_ds_loc) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] vm_util.copy_virtual_disk( [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] session._wait_for_task(vmdk_copy_task) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return self.wait_for_task(task_ref) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return evt.wait() [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] result = hub.switch() [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return self.greenlet.switch() [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self.f(*self.args, **self.kw) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] raise exceptions.translate_fault(task_info.error) [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] Faults: ['InvalidArgument'] [ 2257.691746] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] [ 2257.692730] env[68798]: INFO nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Terminating instance [ 2257.693704] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.695040] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2257.695685] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: 
ce408b93-3713-4819-8c80-63735d9a5467] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2257.695882] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2257.696133] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e07b27a-c62b-4239-b72e-978e775a8426 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.698404] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4333a8eb-3d0f-47b5-a459-4836108b399b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.705407] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2257.705626] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b10b2cd-2140-4b49-8bb4-6ba013da6ecd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.707785] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2257.707960] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2257.708943] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d92423b8-224e-4cb1-97f4-195ce7acd725 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.713796] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for the task: (returnval){ [ 2257.713796] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]522dc75c-8f3f-88dd-a4ed-828f71535abe" [ 2257.713796] env[68798]: _type = "Task" [ 2257.713796] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.728901] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2257.729191] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Creating directory with path [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2257.729438] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caae2698-e93f-4e98-8b2a-c66a8610a518 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.750716] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Created directory with path [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2257.750935] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Fetch image to [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2257.751134] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2257.751936] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa8dd01-8cff-470c-8c89-cfe859dd93b6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.759819] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28926a7f-03b5-499b-bdba-5dfc58c975cd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.770372] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0825ec9b-683c-461c-9011-64112c868776 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.776900] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 
tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2257.777133] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2257.777313] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Deleting the datastore file [datastore1] ce408b93-3713-4819-8c80-63735d9a5467 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2257.777575] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-750ef44c-a855-41cf-9ca9-778654b7209c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.805136] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491c5152-bdc8-4129-ab16-d687cf98d295 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.809609] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for the task: (returnval){ [ 2257.809609] env[68798]: value = "task-4217734" [ 2257.809609] env[68798]: _type = "Task" [ 2257.809609] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.814657] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5114233a-2b2c-4b6e-ba61-13ef2196c595 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.823216] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Task: {'id': task-4217734, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.840703] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2257.899616] env[68798]: DEBUG oslo_vmware.rw_handles [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2257.959537] env[68798]: DEBUG oslo_vmware.rw_handles [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2257.959707] env[68798]: DEBUG oslo_vmware.rw_handles [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2258.048818] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.049248] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2258.049248] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2258.068106] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2258.068291] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2258.068407] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2258.068525] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2258.068650] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2258.321751] env[68798]: DEBUG oslo_vmware.api [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Task: {'id': task-4217734, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072673} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.322025] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2258.322170] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2258.322348] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2258.322520] env[68798]: INFO nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 2258.324806] env[68798]: DEBUG nova.compute.claims [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2258.324976] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.325222] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.439020] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec139514-7a6e-4030-a94d-68fcbc44028a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.446532] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e55bae6-75b2-4819-8f66-44883ef14038 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.475941] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127ed907-74d6-4f27-90ea-908b267ead1a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.483365] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0670446e-8e69-4dd1-9747-b95556d729d7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.496896] env[68798]: DEBUG nova.compute.provider_tree [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2258.505787] env[68798]: DEBUG nova.scheduler.client.report [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2258.519653] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 
tempest-DeleteServersTestJSON-415004131-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.194s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.520207] env[68798]: ERROR nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2258.520207] env[68798]: Faults: ['InvalidArgument'] [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] Traceback (most recent call last): [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self.driver.spawn(context, instance, image_meta, [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self._fetch_image_if_missing(context, vi) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] image_cache(vi, tmp_image_ds_loc) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] vm_util.copy_virtual_disk( [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] session._wait_for_task(vmdk_copy_task) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return self.wait_for_task(task_ref) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return evt.wait() [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: 
ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] result = hub.switch() [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] return self.greenlet.switch() [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] self.f(*self.args, **self.kw) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] raise exceptions.translate_fault(task_info.error) [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] Faults: ['InvalidArgument'] [ 2258.520207] env[68798]: ERROR nova.compute.manager [instance: ce408b93-3713-4819-8c80-63735d9a5467] [ 2258.521099] env[68798]: DEBUG nova.compute.utils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2258.522329] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Build of instance ce408b93-3713-4819-8c80-63735d9a5467 was re-scheduled: A specified parameter was not correct: fileType [ 2258.522329] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2258.522721] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2258.522901] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2258.523086] env[68798]: DEBUG nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2258.523256] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2258.854771] env[68798]: DEBUG nova.network.neutron [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2258.873774] env[68798]: INFO nova.compute.manager [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Took 0.35 seconds to deallocate network for instance. [ 2258.983719] env[68798]: INFO nova.scheduler.client.report [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Deleted allocations for instance ce408b93-3713-4819-8c80-63735d9a5467 [ 2259.006251] env[68798]: DEBUG oslo_concurrency.lockutils [None req-fcacaf20-f574-4b87-a4e9-480f22637937 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 606.356s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2259.006466] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 410.262s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.006697] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Acquiring lock "ce408b93-3713-4819-8c80-63735d9a5467-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.006905] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.007094] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2259.009254] env[68798]: INFO nova.compute.manager [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Terminating instance [ 2259.011375] env[68798]: DEBUG nova.compute.manager [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2259.011580] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2259.012147] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1823447-3f78-447f-a0c7-5d042022be87 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.022063] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33fffb8-dc91-4d79-912a-c296d607c00a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.047939] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce408b93-3713-4819-8c80-63735d9a5467 could not be found. [ 2259.048168] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2259.048353] env[68798]: INFO nova.compute.manager [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2259.048610] env[68798]: DEBUG oslo.service.loopingcall [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2259.048830] env[68798]: DEBUG nova.compute.manager [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2259.048927] env[68798]: DEBUG nova.network.neutron [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2259.075567] env[68798]: DEBUG nova.network.neutron [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2259.083903] env[68798]: INFO nova.compute.manager [-] [instance: ce408b93-3713-4819-8c80-63735d9a5467] Took 0.03 seconds to deallocate network for instance. [ 2259.177746] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e1a4b0a8-2b22-414a-96bf-f47ea06e66d3 tempest-DeleteServersTestJSON-415004131 tempest-DeleteServersTestJSON-415004131-project-member] Lock "ce408b93-3713-4819-8c80-63735d9a5467" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2260.048425] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.048701] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2260.048814] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2261.044775] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.048422] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.048586] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2267.045064] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.048540] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2270.062715] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.062976] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.063158] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.063320] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2270.064485] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05356234-9146-4dd4-99d8-5764f697e79f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.073978] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0fd5d9-cc57-4e07-98c4-669e60d79019 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.089399] env[68798]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ed79d7-0381-49ea-9fd4-46a11cf1cfc9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.096487] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa0b0f5-7653-420a-9d96-9c78411e7c3f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.126269] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180755MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2270.126449] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.126632] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.182069] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2270.182336] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2270.182540] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2270.182825] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2270.182982] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2270.237015] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5cfc5a-d42e-4275-8f9b-a35eaaa70d8b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.245445] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fe057f-fbd8-4179-9c4a-f57b3a1a81ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.275257] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3381e54e-c427-4b43-9bf1-1d3898f60614 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.283168] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193b6699-5cae-4301-8323-a3c92199b782 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.299053] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2270.308515] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2270.327017] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2270.327252] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.201s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.090069] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2303.090069] env[68798]: ERROR oslo_vmware.rw_handles [ 2303.090069] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2303.091950] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2303.092229] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Copying Virtual Disk [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/d3cbccfc-6f27-4da2-a028-8a089df1c242/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2303.092527] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ff85016-b0cb-4500-aa19-92266e151766 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.101311] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for the task: (returnval){ [ 2303.101311] env[68798]: value = "task-4217735" [ 2303.101311] env[68798]: _type = "Task" [ 2303.101311] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.110078] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Task: {'id': task-4217735, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.611677] env[68798]: DEBUG oslo_vmware.exceptions [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2303.611978] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2303.612571] env[68798]: ERROR nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2303.612571] env[68798]: Faults: ['InvalidArgument'] [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Traceback (most recent call last): [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] yield resources [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self.driver.spawn(context, instance, image_meta, [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self._fetch_image_if_missing(context, vi) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] image_cache(vi, tmp_image_ds_loc) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] vm_util.copy_virtual_disk( [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] session._wait_for_task(vmdk_copy_task) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return self.wait_for_task(task_ref) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return evt.wait() [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] result = hub.switch() [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return self.greenlet.switch() [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self.f(*self.args, **self.kw) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] raise exceptions.translate_fault(task_info.error) [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Faults: ['InvalidArgument'] [ 2303.612571] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] [ 2303.613843] env[68798]: INFO nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Terminating instance [ 2303.614532] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2303.614740] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2303.614982] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-845d5152-046b-4ba3-b67c-7234827c041f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.617181] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2303.617381] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2303.618111] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665bd5b2-40c2-47e6-8133-c90c288e4c51 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.625041] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2303.625282] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-505de251-7059-41d7-8f02-8820d103840a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.627494] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2303.627670] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2303.628629] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56f1a631-3fae-48ff-94df-106e41068b30 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.634160] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2303.634160] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]524f2c9f-48ef-6db4-2c07-6a8cf13cb2bf" [ 2303.634160] env[68798]: _type = "Task" [ 2303.634160] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.641934] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]524f2c9f-48ef-6db4-2c07-6a8cf13cb2bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.702062] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2303.702287] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2303.702470] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Deleting the datastore file [datastore1] de697c7a-bcc4-4d01-a9ec-8467e89d4ada {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2303.702754] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cdcb7540-9e79-4098-9675-8566ed3f7356 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.710249] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for the task: (returnval){ [ 2303.710249] env[68798]: value = "task-4217737" [ 2303.710249] env[68798]: _type = "Task" [ 2303.710249] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.718583] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Task: {'id': task-4217737, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.144759] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2304.145159] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating directory with path [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2304.145289] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e4c2f98-f5df-448b-b31d-3772bb64576d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.157420] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created directory with path [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2304.157666] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Fetch image to [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2304.157858] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2304.158671] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6c05a3-cf42-420c-9114-fc47e4c5c911 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.165932] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72123775-4b90-4a3b-a5ed-a9a2fada04ac {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.175378] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e6a5a2-4063-459e-aa35-4c525c694820 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.207011] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d207f2b1-73c7-4ef3-8b1b-0cfe2ff10cf3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.215357] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-56539060-096d-4687-9cc9-b6b4098e0a30 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.219496] env[68798]: DEBUG oslo_vmware.api [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Task: {'id': task-4217737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083643} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.220092] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2304.220324] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2304.220529] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2304.220713] env[68798]: INFO nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2304.222891] env[68798]: DEBUG nova.compute.claims [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2304.223069] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2304.223299] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2304.239432] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2304.292018] env[68798]: DEBUG oslo_vmware.rw_handles [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2304.352424] env[68798]: DEBUG oslo_vmware.rw_handles [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2304.352619] env[68798]: DEBUG oslo_vmware.rw_handles [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2304.362332] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22662465-6cb3-4ad3-aa7a-f320bc64ff26 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.372463] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b051b510-4488-4e73-818a-042acde4b233 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.404116] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6889394-7928-4d14-81d1-d90f2c1989f2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.411784] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30b6712-9d9b-453b-9cc9-528223ac56f0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.426893] env[68798]: DEBUG nova.compute.provider_tree [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2304.435992] env[68798]: DEBUG nova.scheduler.client.report [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2304.449484] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.226s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.450150] env[68798]: ERROR nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2304.450150] env[68798]: Faults: ['InvalidArgument'] [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Traceback (most recent call last): [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: 
de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self.driver.spawn(context, instance, image_meta, [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self._fetch_image_if_missing(context, vi) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] image_cache(vi, tmp_image_ds_loc) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] vm_util.copy_virtual_disk( [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] session._wait_for_task(vmdk_copy_task) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return self.wait_for_task(task_ref) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return evt.wait() [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] result = hub.switch() [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] return self.greenlet.switch() [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] self.f(*self.args, **self.kw) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] raise exceptions.translate_fault(task_info.error) [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Faults: ['InvalidArgument'] [ 2304.450150] env[68798]: ERROR nova.compute.manager [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] [ 2304.451378] env[68798]: DEBUG nova.compute.utils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2304.453077] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Build of instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada was re-scheduled: A specified parameter was not correct: fileType [ 2304.453077] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2304.453500] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2304.453645] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2304.453817] env[68798]: DEBUG nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2304.454019] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2304.764324] env[68798]: DEBUG nova.network.neutron [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2304.775674] env[68798]: INFO nova.compute.manager [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Took 0.32 seconds to deallocate network for instance. [ 2304.879498] env[68798]: INFO nova.scheduler.client.report [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Deleted allocations for instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada [ 2304.900090] env[68798]: DEBUG oslo_concurrency.lockutils [None req-02316e8a-19b1-4f17-9646-114c5a0e8ab8 tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 513.127s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.900372] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 317.560s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2304.900612] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Acquiring lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2304.900824] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2304.900994] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2304.903063] env[68798]: INFO nova.compute.manager [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Terminating instance [ 2304.904855] env[68798]: DEBUG nova.compute.manager [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2304.905039] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2304.905546] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c8b3f65-bb5b-438c-911f-406f47a5e0b5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.914956] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff75f68b-00ce-4e80-a3c9-2e86cc5069ef {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.940260] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de697c7a-bcc4-4d01-a9ec-8467e89d4ada could not be found. [ 2304.940485] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2304.940668] env[68798]: INFO nova.compute.manager [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2304.940923] env[68798]: DEBUG oslo.service.loopingcall [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2304.941177] env[68798]: DEBUG nova.compute.manager [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2304.941276] env[68798]: DEBUG nova.network.neutron [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2304.966771] env[68798]: DEBUG nova.network.neutron [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2304.974797] env[68798]: INFO nova.compute.manager [-] [instance: de697c7a-bcc4-4d01-a9ec-8467e89d4ada] Took 0.03 seconds to deallocate network for instance. [ 2305.083377] env[68798]: DEBUG oslo_concurrency.lockutils [None req-994c90e2-0da4-476f-a06b-78e96f10687c tempest-ServerTagsTestJSON-1865354184 tempest-ServerTagsTestJSON-1865354184-project-member] Lock "de697c7a-bcc4-4d01-a9ec-8467e89d4ada" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.328186] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.049129] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.049497] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2318.049497] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2318.063098] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2318.063324] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2318.063419] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2319.048258] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.049474] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.049857] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2321.045022] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2321.047704] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2322.049525] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.049064] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.800941] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.801323] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.812193] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2328.861464] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.861727] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.863225] env[68798]: INFO nova.compute.claims [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2328.953064] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcd8c43-0aa0-4bac-b68e-c2093eba96af {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.960718] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428cc354-23a7-437c-bcc6-f6cb400e422a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.989992] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edddd139-7e10-4afa-aa24-f77558a439d2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.997350] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16378b41-4394-4bd8-824f-61fee43d836d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.009970] env[68798]: DEBUG nova.compute.provider_tree [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2329.020198] env[68798]: DEBUG nova.scheduler.client.report [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2329.034625] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.173s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.035104] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2329.065142] env[68798]: DEBUG nova.compute.utils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2329.066465] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2329.066654] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2329.075705] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2329.134183] env[68798]: DEBUG nova.policy [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2028f2f661549a6b1607fed075b9a35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba3727c039d41daacf0d1d32f7261d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 2329.140180] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2329.166017] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2329.166408] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2329.166722] 
env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2329.167033] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2329.167338] env[68798]: DEBUG nova.virt.hardware [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2329.168314] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5915638-05ba-42ff-bfc7-4c6a6a11f9d0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.176772] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1403fc-1aa4-4afa-94d5-8c386fe0cc5c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.465801] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Successfully created port: b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2330.048378] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2330.061079] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.061325] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.061506] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.061669] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2330.062896] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3464099e-5568-43cd-9292-9956909f5e87 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.072748] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b12428-af2a-48c3-b66f-97a09e995aeb {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.091863] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329dbe05-ddbd-4e45-b896-dd274d8d1bec {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.100522] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3407738b-3121-43ad-9ce0-e3b5c11c11df {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.105950] env[68798]: DEBUG nova.compute.manager [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Received event network-vif-plugged-b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2330.106195] env[68798]: DEBUG oslo_concurrency.lockutils [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] Acquiring lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.106408] env[68798]: DEBUG oslo_concurrency.lockutils [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.106579] env[68798]: DEBUG oslo_concurrency.lockutils [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.106749] env[68798]: DEBUG nova.compute.manager [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] No waiting events found dispatching network-vif-plugged-b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2330.106915] env[68798]: WARNING nova.compute.manager [req-633d7af6-7e68-4e3b-804b-4e30fc98a99b req-8da8e93f-728a-4bf5-831b-af7b4d78260c service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Received unexpected event network-vif-plugged-b62eac52-d376-4e29-b145-8540eb8055c8 for instance with vm_state building and task_state spawning. 
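The resource-tracker records that follow, and the inventory payload repeated throughout this log, describe each resource class with total, reserved, min_unit, max_unit, step_size and allocation_ratio. As a hedged illustration of how those fields combine into usable capacity (Placement treats it as (total - reserved) * allocation_ratio), here is a short sketch using the figures from this log; effective_capacity is a hypothetical helper, not a Nova or Placement API.

    # Inventory data copied from the report lines in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                      'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                      'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 3,
                      'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv: dict) -> dict:
        # Usable capacity per resource class: (total - reserved) * allocation_ratio.
        return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}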
[ 2330.137099] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180739MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2330.137366] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.137739] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.192360] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Successfully updated port: b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2330.198571] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 538aeb6d-0aca-4d72-af14-859f4397514b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2330.198723] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2330.198848] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c5023157-fb4e-4f4b-b845-5fc9eac80cba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2330.199044] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2330.199179] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2330.204560] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2330.204732] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.204888] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2330.256723] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2330.261149] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6521b95e-917b-4291-81bc-b684b747e6f9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.269542] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d96aa7-272c-4e0f-9321-e7df52b53de7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.302260] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb0ee0d-a044-46d5-a7c8-2520b8868296 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.310117] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95ab899-26ad-4d00-8b15-500b0186e13b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.324389] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2330.334513] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2330.349232] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2330.349428] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.212s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.518994] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Updating instance_info_cache with network_info: [{"id": "b62eac52-d376-4e29-b145-8540eb8055c8", "address": "fa:16:3e:e0:90:08", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb62eac52-d3", "ovs_interfaceid": "b62eac52-d376-4e29-b145-8540eb8055c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2330.530186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.530470] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Instance network_info: |[{"id": "b62eac52-d376-4e29-b145-8540eb8055c8", "address": "fa:16:3e:e0:90:08", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb62eac52-d3", "ovs_interfaceid": "b62eac52-d376-4e29-b145-8540eb8055c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2330.531291] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:90:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b62eac52-d376-4e29-b145-8540eb8055c8', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2330.538864] env[68798]: DEBUG oslo.service.loopingcall [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 
tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2330.539474] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2330.539636] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad17425f-9ecd-42f5-b847-68eba25f7099 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.561399] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2330.561399] env[68798]: value = "task-4217738" [ 2330.561399] env[68798]: _type = "Task" [ 2330.561399] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.570485] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217738, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.072806] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217738, 'name': CreateVM_Task, 'duration_secs': 0.305479} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.073237] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2331.073681] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2331.073848] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2331.074231] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2331.074529] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051b8bb4-d48a-4170-acf2-2bdcec9143d7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.079200] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2331.079200] env[68798]: value = 
"session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a6740b-8ce9-0f0a-70e7-a94d580b810d" [ 2331.079200] env[68798]: _type = "Task" [ 2331.079200] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.088013] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52a6740b-8ce9-0f0a-70e7-a94d580b810d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.590062] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2331.590336] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2331.590552] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.127628] env[68798]: DEBUG nova.compute.manager [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Received event network-changed-b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2332.127873] env[68798]: DEBUG nova.compute.manager [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Refreshing instance network info cache due to event network-changed-b62eac52-d376-4e29-b145-8540eb8055c8. 
{{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2332.128072] env[68798]: DEBUG oslo_concurrency.lockutils [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] Acquiring lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2332.128220] env[68798]: DEBUG oslo_concurrency.lockutils [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] Acquired lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.128394] env[68798]: DEBUG nova.network.neutron [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Refreshing network info cache for port b62eac52-d376-4e29-b145-8540eb8055c8 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2332.567961] env[68798]: DEBUG nova.network.neutron [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Updated VIF entry in instance network info cache for port b62eac52-d376-4e29-b145-8540eb8055c8. {{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2332.568431] env[68798]: DEBUG nova.network.neutron [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Updating instance_info_cache with network_info: [{"id": "b62eac52-d376-4e29-b145-8540eb8055c8", "address": "fa:16:3e:e0:90:08", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb62eac52-d3", "ovs_interfaceid": "b62eac52-d376-4e29-b145-8540eb8055c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2332.578689] env[68798]: DEBUG oslo_concurrency.lockutils [req-f5e1cf98-49aa-449d-b451-7a943318ed2d req-6b9192db-ce00-4c1c-bdd7-47df85c4f233 service nova] Releasing lock "refresh_cache-c5023157-fb4e-4f4b-b845-5fc9eac80cba" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2353.506308] env[68798]: WARNING oslo_vmware.rw_handles [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] 
Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2353.506308] env[68798]: ERROR oslo_vmware.rw_handles [ 2353.506953] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2353.508900] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2353.509187] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Copying Virtual Disk [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/737f5028-91bc-4a0b-a3ea-1ae8d0514170/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2353.509520] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1168b34f-3f94-4490-a3e5-7f19dc4e84b4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.518176] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2353.518176] env[68798]: value = "task-4217739" [ 2353.518176] env[68798]: _type = "Task" [ 2353.518176] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.526237] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217739, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.028497] env[68798]: DEBUG oslo_vmware.exceptions [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2354.028795] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2354.029355] env[68798]: ERROR nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.029355] env[68798]: Faults: ['InvalidArgument'] [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Traceback (most recent call last): [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] yield resources [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self.driver.spawn(context, instance, image_meta, [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self._fetch_image_if_missing(context, vi) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] image_cache(vi, tmp_image_ds_loc) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 
538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] vm_util.copy_virtual_disk( [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] session._wait_for_task(vmdk_copy_task) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return self.wait_for_task(task_ref) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return evt.wait() [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] result = hub.switch() [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return self.greenlet.switch() [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self.f(*self.args, **self.kw) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] raise exceptions.translate_fault(task_info.error) [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Faults: ['InvalidArgument'] [ 2354.029355] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] [ 2354.030390] env[68798]: INFO nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Terminating instance [ 2354.031398] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2354.031577] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2354.031825] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-568fb8e5-64f4-40b4-97f3-4812af1ffdcd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.034028] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2354.034228] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2354.034937] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9502d05c-2add-4121-b26a-764c2784141c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.041974] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2354.042184] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0cab1f9-7291-48eb-96bb-075db2f9d93a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.044422] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2354.044591] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2354.045537] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d234d3-5561-4c8f-870d-27d014056b6e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.050401] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2354.050401] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]526b1acd-8f8d-7d1a-25b6-8687ee1ebe31" [ 2354.050401] env[68798]: _type = "Task" [ 2354.050401] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.059349] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]526b1acd-8f8d-7d1a-25b6-8687ee1ebe31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.113178] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2354.113474] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2354.113637] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleting the datastore file [datastore1] 538aeb6d-0aca-4d72-af14-859f4397514b {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2354.113919] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d989f9b-4f62-48fb-8c9c-7dde07ae45c0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.120859] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2354.120859] env[68798]: value = "task-4217741" [ 2354.120859] env[68798]: _type = "Task" [ 2354.120859] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.128830] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217741, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.560597] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2354.560952] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2354.561195] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3524d5c6-b9b5-4ac6-bb05-a5880461f7b1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.573182] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2354.573427] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Fetch image to [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2354.573642] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2354.574423] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa21b388-064d-42ff-ac63-228b60fcd29f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.581061] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159af0f3-2f48-45f7-bba0-7ac4b32cac6d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.590278] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5196211d-13bc-4a42-9447-e829e9db1a25 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.620367] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8152c6-7293-4918-b453-788a2d6afa9a {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.631923] env[68798]: DEBUG oslo_vmware.api [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217741, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077079} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.632138] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-116ee67f-41f6-4e8e-915a-b77fa0e47e52 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.633853] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2354.634062] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2354.634247] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2354.634451] env[68798]: INFO nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Took 0.60 seconds to destroy the instance on the hypervisor. 
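Editor's note: the task lines above (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task) all follow the same poll-until-done pattern that the wait_for_task / _poll_task debug lines record: submit the vCenter task, then repeatedly check its state until it reports success or a fault (the CopyVirtualDisk_Task here ends in the InvalidArgument fault seen above). The sketch below is a minimal, self-contained illustration of that pattern only; `get_task_info`, the state names, and the logging are simplified stand-ins, not the oslo.vmware API itself.

```python
import logging
import time

LOG = logging.getLogger(__name__)

# Hypothetical task states, mirroring what the _poll_task lines report.
RUNNING, SUCCESS, ERROR = "running", "success", "error"


class TaskFaultError(Exception):
    """Raised when the polled task reports an error state (e.g. InvalidArgument)."""


def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it completes.

    `get_task_info` is an assumed callable returning an object with
    `state`, `progress`, and `error` attributes; the real driver reads this
    from the Task managed object via a PropertyCollector call.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_ref)
        if info.state == SUCCESS:
            LOG.debug("Task %s completed successfully.", task_ref)
            return info
        if info.state == ERROR:
            # A failed task (like the CopyVirtualDisk_Task above) surfaces
            # here as a fault raised back to the caller.
            raise TaskFaultError(info.error)
        LOG.debug("Task %s progress is %s%%.", task_ref, info.progress)
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_ref} did not finish in {timeout}s")
        time.sleep(interval)
```

In the real driver the loop is driven by a looping call rather than `time.sleep`, which is why each poll shows up in the log as a separate `_poll_task` debug line.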
[ 2354.636538] env[68798]: DEBUG nova.compute.claims [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2354.636729] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2354.636946] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2354.654922] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2354.714286] env[68798]: DEBUG oslo_vmware.rw_handles [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2354.772301] env[68798]: DEBUG oslo_vmware.rw_handles [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2354.772495] env[68798]: DEBUG oslo_vmware.rw_handles [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2354.794328] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c9985a-c62b-489e-b0f9-4b316bcea2bd {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.802215] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936fb76a-2fd5-4d1b-9cb9-024d61dd47be {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.833508] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e220539a-367a-427a-bfc4-663e3dad0624 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.841308] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c311b3-b568-4314-83c6-ddcedd6a373b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.854529] env[68798]: DEBUG nova.compute.provider_tree [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2354.864023] env[68798]: DEBUG nova.scheduler.client.report [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2354.877743] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.241s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2354.878248] env[68798]: ERROR nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.878248] env[68798]: Faults: ['InvalidArgument'] [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Traceback (most recent call last): [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2354.878248] 
env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self.driver.spawn(context, instance, image_meta, [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self._fetch_image_if_missing(context, vi) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] image_cache(vi, tmp_image_ds_loc) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] vm_util.copy_virtual_disk( [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] session._wait_for_task(vmdk_copy_task) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return self.wait_for_task(task_ref) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return evt.wait() [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] result = hub.switch() [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] return self.greenlet.switch() [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] self.f(*self.args, **self.kw) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] raise exceptions.translate_fault(task_info.error) [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Faults: ['InvalidArgument'] [ 2354.878248] env[68798]: ERROR nova.compute.manager [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] [ 2354.878977] env[68798]: DEBUG nova.compute.utils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2354.880369] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Build of instance 538aeb6d-0aca-4d72-af14-859f4397514b was re-scheduled: A specified parameter was not correct: fileType [ 2354.880369] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2354.880730] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2354.880901] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2354.881085] env[68798]: DEBUG nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2354.881256] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2355.151846] env[68798]: DEBUG nova.network.neutron [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2355.166546] env[68798]: INFO nova.compute.manager [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Took 0.28 seconds to deallocate network for instance. [ 2355.258249] env[68798]: INFO nova.scheduler.client.report [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleted allocations for instance 538aeb6d-0aca-4d72-af14-859f4397514b [ 2355.282776] env[68798]: DEBUG oslo_concurrency.lockutils [None req-beb7897c-cd83-4ce0-95a6-aad3edc568ad tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 419.094s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.283222] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 223.530s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.284186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2355.284186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2355.284186] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2355.286088] env[68798]: INFO nova.compute.manager [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Terminating instance [ 2355.287858] env[68798]: DEBUG nova.compute.manager [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2355.288129] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2355.288634] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f62cd06-4e26-4c4d-bb60-ddd8aa1d29c5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.298024] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9992123d-63fd-4a4b-aef3-467e66847780 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.323724] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 538aeb6d-0aca-4d72-af14-859f4397514b could not be found. [ 2355.323937] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2355.324130] env[68798]: INFO nova.compute.manager [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2355.324421] env[68798]: DEBUG oslo.service.loopingcall [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2355.324869] env[68798]: DEBUG nova.compute.manager [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2355.324967] env[68798]: DEBUG nova.network.neutron [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2355.352369] env[68798]: DEBUG nova.network.neutron [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2355.361523] env[68798]: INFO nova.compute.manager [-] [instance: 538aeb6d-0aca-4d72-af14-859f4397514b] Took 0.04 seconds to deallocate network for instance. [ 2355.461704] env[68798]: DEBUG oslo_concurrency.lockutils [None req-745d2f99-854b-4a17-97f3-81a114f8169b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "538aeb6d-0aca-4d72-af14-859f4397514b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.179s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.529083] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2374.529568] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 2374.529568] env[68798]: value = "domain-c8" [ 2374.529568] env[68798]: _type = "ClusterComputeResource" [ 2374.529568] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2374.531088] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca85fd16-fe5c-409d-a09c-0c33d63ca6b7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2374.542401] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 2 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2376.080644] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2378.050071] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
2378.050435] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2378.050435] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2378.065017] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2378.065188] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2378.065330] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2379.048410] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.049869] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.050311] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.050311] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2382.050453] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2383.045669] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.049532] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.049532] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 2386.060476] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] There are 0 instances to clean {{(pid=68798) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2387.056260] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.071607] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.049295] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2391.061213] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2391.061483] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2391.061932] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.061932] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2391.063377] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-abbaccef-dfce-425b-bfd9-feb051917d56 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.072734] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce38e899-14b0-45d8-89fa-2c096fa0f9bf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.088307] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f41282-b2bf-46ee-a1ce-b4958346a89b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.095284] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644e6513-a918-4bfe-b41c-b22bac9890a5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.125440] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180743MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2391.125617] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2391.125794] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2391.197535] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance 5a65c46e-989d-4a8f-9387-86cde7725173 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2391.197720] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c5023157-fb4e-4f4b-b845-5fc9eac80cba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2391.197904] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2391.198061] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2391.214108] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing inventories for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2391.228175] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating ProviderTree inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2391.228373] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Updating inventory in ProviderTree for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2391.239392] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing aggregate associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, aggregates: None {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2391.257430] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Refreshing trait associations for resource provider 855bb535-a51f-4f9d-8f32-8a3291b17319, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE {{(pid=68798) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2391.297013] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ea7aae-22ff-408e-92f5-9d83d762e0fa {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.304999] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4488109b-7313-4299-8d1c-351b8e035815 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.335141] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0af10f3-ec63-4823-8a1a-6be9a873fdf2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.342364] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e40fee-5f32-4fba-9419-a8e9035ebf83 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2391.355341] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2391.364231] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2391.377433] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2391.377626] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.252s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2391.377843] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2399.054807] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2399.055195] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Cleaning up deleted instances with incomplete migration {{(pid=68798) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 2403.124029] env[68798]: WARNING oslo_vmware.rw_handles [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2403.124029] env[68798]: ERROR 
oslo_vmware.rw_handles Traceback (most recent call last): [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2403.124029] env[68798]: ERROR oslo_vmware.rw_handles [ 2403.124029] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2403.125182] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2403.125585] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Copying Virtual Disk [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/d7da5108-22d9-47ad-9774-81ffb6857623/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2403.126065] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f6b93b19-89d3-4d95-a6dd-d87e22f51e65 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.134868] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2403.134868] env[68798]: value = "task-4217742" [ 2403.134868] env[68798]: _type = "Task" [ 2403.134868] env[68798]: } to complete. 
{{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.144368] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.517256] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.529586] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Getting list of instances from cluster (obj){ [ 2403.529586] env[68798]: value = "domain-c8" [ 2403.529586] env[68798]: _type = "ClusterComputeResource" [ 2403.529586] env[68798]: } {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2403.531296] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e939f0d1-9cbe-488f-af49-07567ee377a3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.544077] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Got total of 2 instances {{(pid=68798) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2403.544274] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid 5a65c46e-989d-4a8f-9387-86cde7725173 {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2403.544477] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Triggering sync for uuid c5023157-fb4e-4f4b-b845-5fc9eac80cba {{(pid=68798) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2403.544838] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "5a65c46e-989d-4a8f-9387-86cde7725173" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.545113] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.600220] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "c1d601ee-85b3-421d-be0b-d9ee43c324a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.600463] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock 
"c1d601ee-85b3-421d-be0b-d9ee43c324a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.613612] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Starting instance... {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2403.645826] env[68798]: DEBUG oslo_vmware.exceptions [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2403.646256] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2403.646762] env[68798]: ERROR nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2403.646762] env[68798]: Faults: ['InvalidArgument'] [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Traceback (most recent call last): [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] yield resources [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self.driver.spawn(context, instance, image_meta, [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self._fetch_image_if_missing(context, vi) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] image_cache(vi, 
tmp_image_ds_loc) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] vm_util.copy_virtual_disk( [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] session._wait_for_task(vmdk_copy_task) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return self.wait_for_task(task_ref) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return evt.wait() [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] result = hub.switch() [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return self.greenlet.switch() [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self.f(*self.args, **self.kw) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] raise exceptions.translate_fault(task_info.error) [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Faults: ['InvalidArgument'] [ 2403.646762] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] [ 2403.647772] env[68798]: INFO nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Terminating instance [ 2403.649071] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 
tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2403.649298] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2403.650564] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2403.650767] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2403.651007] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae442274-01bf-4235-83ea-3daea47daa47 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.656795] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f25a7a-ee51-4ec9-859c-0f9aa183bcec {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.670387] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2403.671737] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-691c30b5-0b47-42ad-bf7d-13487680efe4 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.673473] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2403.673705] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2403.675542] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2403.675812] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2403.677413] env[68798]: INFO nova.compute.claims [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2403.680042] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12678060-c406-41e5-940b-64f22a0dc79f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.686678] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2403.686678] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]52acb65a-eeec-9a45-85e7-bb5ebcdd175a" [ 2403.686678] env[68798]: _type = "Task" [ 2403.686678] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.694569] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]52acb65a-eeec-9a45-85e7-bb5ebcdd175a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.764342] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2403.764578] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2403.764766] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleting the datastore file [datastore1] 5a65c46e-989d-4a8f-9387-86cde7725173 {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2403.765422] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ba84c5a-c622-4a2c-9d34-e2eb63b1cb94 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.772536] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2403.772536] env[68798]: value = "task-4217744" [ 2403.772536] env[68798]: _type = "Task" [ 2403.772536] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2403.784653] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217744, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2403.787848] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dba72c-2ec3-4515-be19-2c5f6a94fd67 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.794921] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8896581d-b45e-4188-a76b-db1d25722ca6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.825151] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2260df95-9504-4ec2-b332-606d7f59ae80 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.832720] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8869e138-eaf9-46cf-9163-a7e1f00cf85f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.846360] env[68798]: DEBUG nova.compute.provider_tree [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2403.858611] env[68798]: DEBUG nova.scheduler.client.report [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2403.873180] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.197s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2403.873705] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Start building networks asynchronously for instance. 
{{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2403.909535] env[68798]: DEBUG nova.compute.utils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2403.910887] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2403.911075] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2403.920135] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2403.981647] env[68798]: DEBUG nova.policy [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58e7ee34608848b39cc2a7114e7d682d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8efeea8a59294c7ca8b499dda555a3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 2403.988775] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2404.017877] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2404.018140] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2404.018304] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2404.018487] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2404.018636] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2404.018784] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2404.018993] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2404.019176] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2404.019345] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 
tempest-ServersTestJSON-1349294209-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2404.019510] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2404.019688] env[68798]: DEBUG nova.virt.hardware [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2404.020588] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f0012-a182-40ee-bf06-410d95102040 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.029360] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e6136f-a7c1-4236-8fe1-72d0db8d5d26 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.197628] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2404.197976] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating directory with path [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2404.198128] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e9eda99-7696-4585-a9b7-4958c7339b7d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.210568] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Created directory with path [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2404.210771] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Fetch image to [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2404.210969] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 
c5023157-fb4e-4f4b-b845-5fc9eac80cba] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2404.211809] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21673eaa-95bc-441e-8c3b-7f61f96c66c2 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.219213] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bfce97-056c-4b56-b8c5-4a404cd285af {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.229641] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676cd2c2-a5a4-4c4c-a2ac-bb2d48948ab9 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.262052] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a2d520-2a5a-4c4e-8675-44dc024b7523 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.268292] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-91ad99cc-30f9-4826-b288-259b94f7ba5e {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.285856] env[68798]: DEBUG oslo_vmware.api [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': task-4217744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070013} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2404.286284] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2404.286497] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2404.286728] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2404.286843] env[68798]: INFO nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Took 0.64 seconds to destroy the instance on the hypervisor. 
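The traceback at 2403.646762 above walks a single call chain: nova.virt.vmwareapi.vm_util.copy_virtual_disk submits CopyVirtualDisk_Task (task-4217742), session._wait_for_task hands it to oslo_vmware.api's wait_for_task, _poll_task polls the task until vCenter reports an error, and translate_fault turns the SOAP fault into VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']), which is what aborts the spawn of instance 5a65c46e-989d-4a8f-9387-86cde7725173. A minimal sketch of that polling pattern follows; get_task_info and the VimFaultException class here are illustrative stand-ins, not the oslo.vmware implementations.

import time


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vSphere task until it finishes, mirroring the
    "Task: {...} progress is 0%" lines above, and raise on failure."""
    while True:
        info = get_task_info(task_ref)  # in oslo.vmware this is a property read on the Task object
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            # This is the branch taken by task-4217742 (CopyVirtualDisk_Task):
            # the fault list carries 'InvalidArgument' for the fileType parameter.
            raise VimFaultException(info.get("faults", []), info["error"])
        time.sleep(poll_interval)

Separately, the resource-tracker and scheduler-report records above (e.g. 2391.228175 and 2403.858611) keep reporting the same inventory for provider 855bb535-a51f-4f9d-8f32-8a3291b17319. Under the usual placement rule that schedulable capacity is (total - reserved) * allocation_ratio, with max_unit capping any single allocation, those numbers work out as in the small sketch below; the dict is copied from the log, while the helper name is illustrative.

INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 3},
}


def effective_capacity(inventory):
    """Schedulable amount per resource class for one provider."""
    return {
        rc: (spec["total"] - spec["reserved"]) * spec["allocation_ratio"]
        for rc, spec in inventory.items()
    }


print(effective_capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}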
[ 2404.289198] env[68798]: DEBUG nova.compute.claims [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2404.289393] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2404.289645] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2404.299991] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2404.318070] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Successfully created port: 193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2404.361240] env[68798]: DEBUG oslo_vmware.rw_handles [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2404.429680] env[68798]: DEBUG oslo_vmware.rw_handles [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2404.433025] env[68798]: DEBUG oslo_vmware.rw_handles [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2404.496330] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02736301-86a7-40cc-a6d1-eb2ab5dc2027 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.504549] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101f06af-cb1f-467d-aba2-48a0029487af {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.536665] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b51bedd-b82e-4faa-b035-39204dc269c6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.544503] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f17b274-75de-4909-ad5d-baae924255ce {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.558741] env[68798]: DEBUG nova.compute.provider_tree [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2404.572325] env[68798]: DEBUG nova.scheduler.client.report [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2404.591047] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.301s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2404.591606] env[68798]: ERROR nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2404.591606] env[68798]: Faults: ['InvalidArgument'] [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Traceback (most recent call last): [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 
5a65c46e-989d-4a8f-9387-86cde7725173] self.driver.spawn(context, instance, image_meta, [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self._fetch_image_if_missing(context, vi) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] image_cache(vi, tmp_image_ds_loc) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] vm_util.copy_virtual_disk( [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] session._wait_for_task(vmdk_copy_task) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return self.wait_for_task(task_ref) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return evt.wait() [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] result = hub.switch() [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] return self.greenlet.switch() [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] self.f(*self.args, **self.kw) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] raise exceptions.translate_fault(task_info.error) [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Faults: ['InvalidArgument'] [ 2404.591606] env[68798]: ERROR nova.compute.manager [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] [ 2404.592390] env[68798]: DEBUG nova.compute.utils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2404.594848] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Build of instance 5a65c46e-989d-4a8f-9387-86cde7725173 was re-scheduled: A specified parameter was not correct: fileType [ 2404.594848] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2404.594848] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2404.596199] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2404.596199] env[68798]: DEBUG nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2404.596199] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2405.055048] env[68798]: DEBUG nova.network.neutron [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.102953] env[68798]: INFO nova.compute.manager [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Took 0.51 seconds to deallocate network for instance. [ 2405.205235] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Successfully updated port: 193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2405.222830] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2405.222878] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2405.223020] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2405.267612] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2405.271629] env[68798]: INFO nova.scheduler.client.report [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Deleted allocations for instance 5a65c46e-989d-4a8f-9387-86cde7725173 [ 2405.301125] env[68798]: DEBUG oslo_concurrency.lockutils [None req-e29d85fa-92de-4d7b-a9c5-2fdac408abf2 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 394.025s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.301394] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 198.953s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.301626] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2405.301826] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.301998] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.304092] env[68798]: INFO nova.compute.manager [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Terminating instance [ 2405.305967] env[68798]: DEBUG nova.compute.manager [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Start destroying the instance on the hypervisor. 
{{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2405.306179] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2405.306712] env[68798]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-62493085-01e9-47ef-9d02-77938ab9e2ee {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.316944] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7e6d36-38e5-46d6-ac2c-8097376ff440 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.343710] env[68798]: WARNING nova.virt.vmwareapi.vmops [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5a65c46e-989d-4a8f-9387-86cde7725173 could not be found. [ 2405.343942] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2405.344146] env[68798]: INFO nova.compute.manager [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2405.344394] env[68798]: DEBUG oslo.service.loopingcall [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2405.344895] env[68798]: DEBUG nova.compute.manager [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2405.344994] env[68798]: DEBUG nova.network.neutron [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2405.382936] env[68798]: DEBUG nova.network.neutron [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.391260] env[68798]: INFO nova.compute.manager [-] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] Took 0.05 seconds to deallocate network for instance. 
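The oslo_concurrency.lockutils entries above record how long each lock was waited on and held (for example, the build lock on instance 5a65c46e-989d-4a8f-9387-86cde7725173 was held for 394.025s and the terminate path waited 198.953s for it). Below is a minimal sketch for summarising those durations from a log like this one; the regular expressions and the report format are assumptions based only on the line format visible here and are not part of Nova or oslo.concurrency.

#!/usr/bin/env python3
# Minimal sketch (hypothetical helper, not part of Nova): summarise
# oslo.concurrency lock wait/hold times from log lines like the ones above.
import re
import sys
from collections import defaultdict

# Matches: Lock "name" acquired by "..." :: waited 198.953s
ACQ = re.compile(r'Lock "(?P<name>[^"]+)" acquired by .* waited (?P<sec>[\d.]+)s')
# Matches: Lock "name" "released" by "..." :: held 394.025s
REL = re.compile(r'Lock "(?P<name>[^"]+)" "released" by .* held (?P<sec>[\d.]+)s')

def summarise(lines):
    waits, holds = defaultdict(list), defaultdict(list)
    for line in lines:
        m = ACQ.search(line)
        if m:
            waits[m.group('name')].append(float(m.group('sec')))
        m = REL.search(line)
        if m:
            holds[m.group('name')].append(float(m.group('sec')))
    for name in sorted(set(waits) | set(holds)):
        w, h = waits.get(name, []), holds.get(name, [])
        print(f"{name}: max wait {max(w, default=0.0):.3f}s over "
              f"{len(w)} acquisitions, max hold {max(h, default=0.0):.3f}s")

if __name__ == "__main__":
    summarise(sys.stdin)

Fed the raw log on stdin (for example, python3 lock_times.py < nova-compute.log, filename assumed), it prints the worst wait and hold time observed per lock name, which makes long-held build locks like the one above easy to spot.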
[ 2405.492668] env[68798]: DEBUG nova.network.neutron [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Updating instance_info_cache with network_info: [{"id": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "address": "fa:16:3e:58:c7:62", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap193ed98d-58", "ovs_interfaceid": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2405.506793] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2405.507253] env[68798]: DEBUG nova.compute.manager [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Instance network_info: |[{"id": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "address": "fa:16:3e:58:c7:62", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap193ed98d-58", "ovs_interfaceid": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2405.507995] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a 
tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:c7:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c6324fd-a761-417c-bc85-b6278daecfc5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '193ed98d-585e-4592-ab2a-6cea4af56a3c', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2405.515872] env[68798]: DEBUG oslo.service.loopingcall [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2405.516708] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2405.518886] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6ea02e3-5543-4402-9b77-5fb0760dcc9a {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.533940] env[68798]: DEBUG oslo_concurrency.lockutils [None req-973a9bbf-b84f-4715-a371-689bfa1a2460 tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.232s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.534959] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.990s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2405.535105] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: 5a65c46e-989d-4a8f-9387-86cde7725173] During sync_power_state the instance has a pending task (deleting). Skip. [ 2405.535615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "5a65c46e-989d-4a8f-9387-86cde7725173" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2405.541193] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2405.541193] env[68798]: value = "task-4217745" [ 2405.541193] env[68798]: _type = "Task" [ 2405.541193] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.550278] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217745, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.051889] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217745, 'name': CreateVM_Task, 'duration_secs': 0.351967} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2406.052082] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2406.052743] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2406.052914] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2406.053252] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2406.053520] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6549250a-8c00-45f9-8aac-e3868b1750c0 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2406.058716] env[68798]: DEBUG oslo_vmware.api [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2406.058716] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]524d8c4d-9d36-f6e5-afd8-810fa547d1b1" [ 2406.058716] env[68798]: _type = "Task" [ 2406.058716] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2406.067437] env[68798]: DEBUG oslo_vmware.api [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]524d8c4d-9d36-f6e5-afd8-810fa547d1b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2406.569858] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2406.570242] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2406.570472] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2407.153654] env[68798]: DEBUG nova.compute.manager [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Received event network-vif-plugged-193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2407.153907] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Acquiring lock "c1d601ee-85b3-421d-be0b-d9ee43c324a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.154102] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Lock "c1d601ee-85b3-421d-be0b-d9ee43c324a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2407.154277] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Lock "c1d601ee-85b3-421d-be0b-d9ee43c324a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2407.154448] env[68798]: DEBUG nova.compute.manager [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] No waiting events found dispatching network-vif-plugged-193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2407.154622] env[68798]: WARNING nova.compute.manager [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Received unexpected event network-vif-plugged-193ed98d-585e-4592-ab2a-6cea4af56a3c for 
instance with vm_state building and task_state spawning. [ 2407.154791] env[68798]: DEBUG nova.compute.manager [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Received event network-changed-193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2407.154948] env[68798]: DEBUG nova.compute.manager [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Refreshing instance network info cache due to event network-changed-193ed98d-585e-4592-ab2a-6cea4af56a3c. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2407.155170] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Acquiring lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2407.155346] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Acquired lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2407.155514] env[68798]: DEBUG nova.network.neutron [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Refreshing network info cache for port 193ed98d-585e-4592-ab2a-6cea4af56a3c {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2407.415112] env[68798]: DEBUG nova.network.neutron [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Updated VIF entry in instance network info cache for port 193ed98d-585e-4592-ab2a-6cea4af56a3c. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2407.415492] env[68798]: DEBUG nova.network.neutron [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Updating instance_info_cache with network_info: [{"id": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "address": "fa:16:3e:58:c7:62", "network": {"id": "0b7b81db-f8e9-4c4a-9e95-8ce4e76ecc2f", "bridge": "br-int", "label": "tempest-ServersTestJSON-1522738958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8efeea8a59294c7ca8b499dda555a3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c6324fd-a761-417c-bc85-b6278daecfc5", "external-id": "nsx-vlan-transportzone-426", "segmentation_id": 426, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap193ed98d-58", "ovs_interfaceid": "193ed98d-585e-4592-ab2a-6cea4af56a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2407.425146] env[68798]: DEBUG oslo_concurrency.lockutils [req-667f2d77-d7f6-44c2-befb-42847f9a4635 req-4addad5e-6078-4796-a3ca-1dc21186aee5 service nova] Releasing lock "refresh_cache-c1d601ee-85b3-421d-be0b-d9ee43c324a0" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2436.076618] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2438.050465] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2438.050760] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Starting heal instance info cache {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2438.050834] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Rebuilding the list of instances to heal {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2438.063734] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Skipping network cache update for instance because it is Building. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2438.063893] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Skipping network cache update for instance because it is Building. 
{{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2438.064184] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Didn't find any instances for network info cache update. {{(pid=68798) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2441.049184] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.048993] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.049219] env[68798]: DEBUG nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68798) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2443.049524] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.044019] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2444.047703] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2449.048491] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.049195] env[68798]: DEBUG oslo_service.periodic_task [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68798) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2452.061378] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.061615] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.061780] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2452.062055] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68798) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2452.063300] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757dd07a-2287-4470-9be9-01bbb5cf61b3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.072607] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ec7436-4f21-4101-8893-2b41147d5cd1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.086472] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e3a536-9006-49df-828c-3ba065ef22a7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.093066] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3bd265-2caf-404f-88c4-58f32c62f03b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.123301] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180669MB free_disk=3GB free_vcpus=48 pci_devices=None {{(pid=68798) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2452.123456] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2452.123591] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2452.167599] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c5023157-fb4e-4f4b-b845-5fc9eac80cba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2452.167756] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Instance c1d601ee-85b3-421d-be0b-d9ee43c324a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68798) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2452.167938] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2452.168097] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68798) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2452.206395] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb1491f-e40a-4ad1-b114-a13b20931203 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.213915] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b88b61-7705-4870-9feb-d283cb2804e3 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.243350] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c8b256-f7b5-4f4d-b077-f4df6938dc3f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.250671] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61bcd60-cff6-4ed7-a317-e21d77c659db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2452.263867] env[68798]: DEBUG nova.compute.provider_tree [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2452.272649] env[68798]: DEBUG nova.scheduler.client.report [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2452.286319] env[68798]: DEBUG nova.compute.resource_tracker [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68798) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2452.286513] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.163s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2453.139244] env[68798]: WARNING oslo_vmware.rw_handles [None 
req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles response.begin() [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2453.139244] env[68798]: ERROR oslo_vmware.rw_handles [ 2453.139964] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Downloaded image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2453.141926] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Caching image {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2453.142220] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Copying Virtual Disk [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk to [datastore1] vmware_temp/187b5c27-39b1-4d82-b576-95bcdaf17d48/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk {{(pid=68798) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2453.142531] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ca3017a-3de9-4fef-a13f-754a8e5e18b7 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.151702] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2453.151702] env[68798]: value = "task-4217746" [ 2453.151702] env[68798]: _type = "Task" [ 2453.151702] 
env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.160306] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217746, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.662767] env[68798]: DEBUG oslo_vmware.exceptions [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Fault InvalidArgument not matched. {{(pid=68798) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2453.663063] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2453.663729] env[68798]: ERROR nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2453.663729] env[68798]: Faults: ['InvalidArgument'] [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Traceback (most recent call last): [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] yield resources [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self.driver.spawn(context, instance, image_meta, [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self._fetch_image_if_missing(context, vi) [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] image_cache(vi, tmp_image_ds_loc) [ 2453.663729] env[68798]: ERROR 
nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] vm_util.copy_virtual_disk( [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] session._wait_for_task(vmdk_copy_task) [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return self.wait_for_task(task_ref) [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return evt.wait() [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] result = hub.switch() [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return self.greenlet.switch() [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self.f(*self.args, **self.kw) [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] raise exceptions.translate_fault(task_info.error) [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Faults: ['InvalidArgument'] [ 2453.663729] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] [ 2453.665267] env[68798]: INFO nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Terminating instance [ 2453.665681] env[68798]: DEBUG oslo_concurrency.lockutils [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2453.665889] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2453.666156] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ce71915-f3d0-4d1a-8f61-944af161f598 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.668446] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Start destroying the instance on the hypervisor. {{(pid=68798) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2453.668646] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Destroying instance {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2453.669395] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35b83ac-7704-4769-850e-6048f6b94d8f {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.676676] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Unregistering the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2453.676933] env[68798]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ef16d5d-4981-4be5-b8eb-28d54e9c4685 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.679351] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2453.679529] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68798) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2453.680548] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c03e798a-259a-4eca-9ec4-9d4b42bdf725 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.685693] env[68798]: DEBUG oslo_vmware.api [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Waiting for the task: (returnval){ [ 2453.685693] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]524b5894-b23a-18da-0689-a6a7ae50cdeb" [ 2453.685693] env[68798]: _type = "Task" [ 2453.685693] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.696418] env[68798]: DEBUG oslo_vmware.api [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]524b5894-b23a-18da-0689-a6a7ae50cdeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2453.747976] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Unregistered the VM {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2453.748237] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Deleting contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2453.748427] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleting the datastore file [datastore1] c5023157-fb4e-4f4b-b845-5fc9eac80cba {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2453.748708] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fed5e30e-5c40-4a02-aa70-5984b8ba9a7c {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2453.756602] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2453.756602] env[68798]: value = "task-4217748" [ 2453.756602] env[68798]: _type = "Task" [ 2453.756602] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2453.764776] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217748, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2454.196639] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Preparing fetch location {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2454.197071] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating directory with path [datastore1] vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2454.197169] env[68798]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ed426f8-f8fd-4d0a-b011-693949fa680b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.211631] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Created directory with path [datastore1] vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2454.211829] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Fetch image to [datastore1] vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2454.212015] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to [datastore1] vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk on the data store datastore1 {{(pid=68798) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2454.212784] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ea2c9b-156e-45b7-93db-6524c8c4f3db {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.219831] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdc7702-2c3b-4182-9e08-a62d2de1e790 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.228917] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd4d839-4e67-4e6e-b572-5cf52a1d3e2d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.262090] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d041be-d006-4ef8-aeca-361670ffad00 {{(pid=68798) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.271359] env[68798]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7e8d11cf-4a5a-4a3c-9225-5f025b521fe6 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.273218] env[68798]: DEBUG oslo_vmware.api [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': task-4217748, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318077} completed successfully. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2454.273463] env[68798]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleted the datastore file {{(pid=68798) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2454.273647] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Deleted contents of the VM from datastore datastore1 {{(pid=68798) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2454.273821] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Instance destroyed {{(pid=68798) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2454.273993] env[68798]: INFO nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Took 0.61 seconds to destroy the instance on the hypervisor. 
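The destroy path above (UnregisterVM, then FileManager.DeleteDatastoreFile_Task polled until "completed successfully") follows the usual oslo.vmware invoke-then-wait pattern. The following is a minimal sketch of that pattern only, not Nova's ds_util.file_delete() verbatim; it assumes session is an oslo_vmware.api.VMwareAPISession and that the datacenter moref is already known, and the FileNotFoundException handling is illustrative.

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, ds_path, dc_ref):
        # Start FileManager.DeleteDatastoreFile_Task, then poll it with
        # wait_for_task(); the polling produces the "progress is 0%" /
        # "completed successfully" debug lines seen above and raises a
        # translated VIM fault if the task fails.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=str(ds_path),
                                  datacenter=dc_ref)
        try:
            session.wait_for_task(task)
        except vexc.FileNotFoundException:
            # Treat an already-missing file as successfully deleted.
            pass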
[ 2454.276237] env[68798]: DEBUG nova.compute.claims [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Aborting claim: {{(pid=68798) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2454.276429] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2454.276643] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2454.300260] env[68798]: DEBUG nova.virt.vmwareapi.images [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] [instance: c1d601ee-85b3-421d-be0b-d9ee43c324a0] Downloading image file data 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 to the data store datastore1 {{(pid=68798) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2454.355797] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68798) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2454.360041] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2297a55-1081-4ec1-9fef-baf5ba6c0546 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.429654] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d013ae2b-00b1-4f13-8f01-97a4aebe6330 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.433387] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Completed reading data from the image iterator. {{(pid=68798) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2454.433604] env[68798]: DEBUG oslo_vmware.rw_handles [None req-7e24c087-7d99-4003-af2a-9ba8f027615a tempest-ServersTestJSON-1349294209 tempest-ServersTestJSON-1349294209-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/afed13c9-a756-4416-ad0c-fcbbf5d02ae5/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68798) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2454.462113] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc5cdc6-94c0-40e8-9b44-2a01bd907cc5 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.469790] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff35b6e4-9117-4d88-8d2e-b2ec50a28cdf {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2454.482890] env[68798]: DEBUG nova.compute.provider_tree [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2454.491771] env[68798]: DEBUG nova.scheduler.client.report [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2454.505619] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.229s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2454.506180] env[68798]: ERROR nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2454.506180] env[68798]: Faults: ['InvalidArgument'] [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Traceback (most recent call last): [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self.driver.spawn(context, instance, image_meta, [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: 
c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self._fetch_image_if_missing(context, vi)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] image_cache(vi, tmp_image_ds_loc)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] vm_util.copy_virtual_disk(
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] session._wait_for_task(vmdk_copy_task)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return self.wait_for_task(task_ref)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return evt.wait()
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] result = hub.switch()
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] return self.greenlet.switch()
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] self.f(*self.args, **self.kw)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] raise exceptions.translate_fault(task_info.error)
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Faults:
['InvalidArgument'] [ 2454.506180] env[68798]: ERROR nova.compute.manager [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] [ 2454.506907] env[68798]: DEBUG nova.compute.utils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] VimFaultException {{(pid=68798) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2454.508333] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Build of instance c5023157-fb4e-4f4b-b845-5fc9eac80cba was re-scheduled: A specified parameter was not correct: fileType [ 2454.508333] env[68798]: Faults: ['InvalidArgument'] {{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2454.508727] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Unplugging VIFs for instance {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2454.508912] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68798) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2454.509098] env[68798]: DEBUG nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Deallocating network for instance {{(pid=68798) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2454.509282] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] deallocate_for_instance() {{(pid=68798) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2454.882340] env[68798]: DEBUG nova.network.neutron [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Updating instance_info_cache with network_info: [] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2454.893891] env[68798]: INFO nova.compute.manager [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] Took 0.38 seconds to deallocate network for instance. 
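The traceback above ends in oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']) raised while _cache_sparse_image waits on a CopyVirtualDisk_Task; the compute manager then treats the failed spawn as retryable, re-schedules the build, and deallocates its networking, as logged. The sketch below only illustrates where that fault surfaces; it is not Nova's vm_util.copy_virtual_disk() verbatim, the copy spec is left abstract, and the session and datacenter handles are assumed to exist.

    from oslo_vmware import exceptions as vexc

    def copy_sparse_image(session, dc_ref, source_vmdk, dest_vmdk, copy_spec):
        # CopyVirtualDisk_Task is invoked on the VirtualDiskManager and then
        # waited on; a fault in the task result is translated and raised by
        # wait_for_task() (the api.py _poll_task frame in the traceback).
        disk_manager = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_manager,
                                  sourceName=source_vmdk,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_vmdk,
                                  destDatacenter=dc_ref,
                                  destSpec=copy_spec)
        try:
            session.wait_for_task(task)
        except vexc.VimFaultException:
            # The fault names (here ['InvalidArgument']) are carried on the
            # exception's fault_list; re-raising lets the caller abort the
            # resource claim and re-schedule the instance, which is the
            # "was re-scheduled" sequence above.
            raise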
[ 2454.989231] env[68798]: INFO nova.scheduler.client.report [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Deleted allocations for instance c5023157-fb4e-4f4b-b845-5fc9eac80cba [ 2455.014281] env[68798]: DEBUG oslo_concurrency.lockutils [None req-dfdcfc5a-5350-49e3-89f3-4fd4b16fa776 tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.213s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2455.014479] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 51.469s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2455.014579] env[68798]: INFO nova.compute.manager [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] [instance: c5023157-fb4e-4f4b-b845-5fc9eac80cba] During sync_power_state the instance has a pending task (spawning). Skip. [ 2455.014724] env[68798]: DEBUG oslo_concurrency.lockutils [None req-2b4b2577-ebc0-4953-ad7d-55c541d0fca7 None None] Lock "c5023157-fb4e-4f4b-b845-5fc9eac80cba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2456.716107] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "75d57c9c-5c4b-435d-9837-1f22a1904259" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2456.716511] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "75d57c9c-5c4b-435d-9837-1f22a1904259" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2456.727946] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Starting instance... 
{{(pid=68798) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2456.778945] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2456.779220] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2456.780885] env[68798]: INFO nova.compute.claims [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2456.861765] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1f56ff-1cff-42c9-8a3b-018bdf6eac64 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.870064] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d8ba5-d7df-4ae4-b8cf-15237bb8f248 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.901397] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad78219-e1eb-438b-a328-3582506bb2ff {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.909749] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fd485e-14d6-4dc5-96c2-42a41d05ce4b {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.924403] env[68798]: DEBUG nova.compute.provider_tree [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed in ProviderTree for provider: 855bb535-a51f-4f9d-8f32-8a3291b17319 {{(pid=68798) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2456.936983] env[68798]: DEBUG nova.scheduler.client.report [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Inventory has not changed for provider 855bb535-a51f-4f9d-8f32-8a3291b17319 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 3, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68798) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2456.951344] env[68798]: DEBUG 
oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.172s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2456.951933] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Start building networks asynchronously for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2456.988025] env[68798]: DEBUG nova.compute.utils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Using /dev/sd instead of None {{(pid=68798) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2456.989772] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Allocating IP information in the background. {{(pid=68798) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2456.989772] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] allocate_for_instance() {{(pid=68798) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2457.001904] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Start building block device mappings for instance. {{(pid=68798) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2457.057230] env[68798]: DEBUG nova.policy [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2028f2f661549a6b1607fed075b9a35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba3727c039d41daacf0d1d32f7261d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68798) authorize /opt/stack/nova/nova/policy.py:203}} [ 2457.067564] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Start spawning the instance on the hypervisor. 
{{(pid=68798) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2457.093821] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-16T20:31:07Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-16T20:30:51Z,direct_url=,disk_format='vmdk',id=659c1b0c-65c8-46ab-93ff-5947bb8f4c70,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cc340e748dae4a43b16acfcfeecd7cd0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-16T20:30:52Z,virtual_size=,visibility=), allow threads: False {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2457.094104] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2457.094282] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image limits 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2457.094472] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Flavor pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2457.094621] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Image pref 0:0:0 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2457.094769] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68798) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2457.094982] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2457.095164] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2457.095334] 
env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Got 1 possible topologies {{(pid=68798) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2457.095499] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2457.095721] env[68798]: DEBUG nova.virt.hardware [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68798) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2457.096632] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0b2761e-ccd7-4ffa-ae04-666105d7292d {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.105411] env[68798]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afefe64-a92d-49f7-ad43-be65dd8fce54 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.376979] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Successfully created port: 4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2458.016154] env[68798]: DEBUG nova.compute.manager [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Received event network-vif-plugged-4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2458.016527] env[68798]: DEBUG oslo_concurrency.lockutils [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] Acquiring lock "75d57c9c-5c4b-435d-9837-1f22a1904259-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2458.016853] env[68798]: DEBUG oslo_concurrency.lockutils [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] Lock "75d57c9c-5c4b-435d-9837-1f22a1904259-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2458.017155] env[68798]: DEBUG oslo_concurrency.lockutils [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] Lock "75d57c9c-5c4b-435d-9837-1f22a1904259-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68798) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2458.017441] env[68798]: DEBUG nova.compute.manager [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] No waiting events found dispatching network-vif-plugged-4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2458.017710] env[68798]: WARNING nova.compute.manager [req-0aecfd84-9f66-4d25-a5af-7b6191af2b55 req-c07098ab-bc91-49ee-85fd-417e51145b80 service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Received unexpected event network-vif-plugged-4be37e6d-5d16-4b03-b610-9579dd8bc9f6 for instance with vm_state building and task_state spawning. [ 2458.143391] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Successfully updated port: 4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2458.155580] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2458.155774] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2458.155898] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Building network info cache for instance {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2458.226379] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Instance cache missing network info. 
{{(pid=68798) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2458.444879] env[68798]: DEBUG nova.network.neutron [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Updating instance_info_cache with network_info: [{"id": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "address": "fa:16:3e:bc:30:65", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be37e6d-5d", "ovs_interfaceid": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2458.457359] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2458.457740] env[68798]: DEBUG nova.compute.manager [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Instance network_info: |[{"id": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "address": "fa:16:3e:bc:30:65", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be37e6d-5d", "ovs_interfaceid": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68798) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2458.458696] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:30:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4be37e6d-5d16-4b03-b610-9579dd8bc9f6', 'vif_model': 'vmxnet3'}] {{(pid=68798) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2458.466689] env[68798]: DEBUG oslo.service.loopingcall [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68798) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2458.467708] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Creating VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2458.467963] env[68798]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6207e57e-3f68-46ef-a241-329389aa49ea {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.490260] env[68798]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2458.490260] env[68798]: value = "task-4217749" [ 2458.490260] env[68798]: _type = "Task" [ 2458.490260] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2458.502050] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217749, 'name': CreateVM_Task} progress is 0%. {{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.000788] env[68798]: DEBUG oslo_vmware.api [-] Task: {'id': task-4217749, 'name': CreateVM_Task, 'duration_secs': 0.416556} completed successfully. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2459.000977] env[68798]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Created VM on the ESX host {{(pid=68798) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2459.001614] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2459.001779] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2459.002148] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2459.002418] env[68798]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4ad09de-bc1d-4b65-8796-0075f039c6c1 {{(pid=68798) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.007392] env[68798]: DEBUG oslo_vmware.api [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Waiting for the task: (returnval){ [ 2459.007392] env[68798]: value = "session[52d02cf8-0819-edf1-34ac-1331c58488bc]526e95f8-98ec-819d-9bba-88f5c8c1e61e" [ 2459.007392] env[68798]: _type = "Task" [ 2459.007392] env[68798]: } to complete. {{(pid=68798) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2459.015713] env[68798]: DEBUG oslo_vmware.api [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Task: {'id': session[52d02cf8-0819-edf1-34ac-1331c58488bc]526e95f8-98ec-819d-9bba-88f5c8c1e61e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68798) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.519478] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Releasing lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2459.519859] env[68798]: DEBUG nova.virt.vmwareapi.vmops [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Processing image 659c1b0c-65c8-46ab-93ff-5947bb8f4c70 {{(pid=68798) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2459.519948] env[68798]: DEBUG oslo_concurrency.lockutils [None req-c297cf0d-d12e-4dd4-91c7-3022ebdc668b tempest-ServerDiskConfigTestJSON-1537959740 tempest-ServerDiskConfigTestJSON-1537959740-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2460.043395] env[68798]: DEBUG nova.compute.manager [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Received event network-changed-4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2460.043608] env[68798]: DEBUG nova.compute.manager [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Refreshing instance network info cache due to event network-changed-4be37e6d-5d16-4b03-b610-9579dd8bc9f6. {{(pid=68798) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2460.043830] env[68798]: DEBUG oslo_concurrency.lockutils [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] Acquiring lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2460.043975] env[68798]: DEBUG oslo_concurrency.lockutils [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] Acquired lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2460.044201] env[68798]: DEBUG nova.network.neutron [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Refreshing network info cache for port 4be37e6d-5d16-4b03-b610-9579dd8bc9f6 {{(pid=68798) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2460.484890] env[68798]: DEBUG nova.network.neutron [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Updated VIF entry in instance network info cache for port 4be37e6d-5d16-4b03-b610-9579dd8bc9f6. 
{{(pid=68798) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2460.485304] env[68798]: DEBUG nova.network.neutron [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] [instance: 75d57c9c-5c4b-435d-9837-1f22a1904259] Updating instance_info_cache with network_info: [{"id": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "address": "fa:16:3e:bc:30:65", "network": {"id": "62ee92b6-f97c-4e68-9dbd-f52d1e5dbcb3", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-789824559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba3727c039d41daacf0d1d32f7261d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4be37e6d-5d", "ovs_interfaceid": "4be37e6d-5d16-4b03-b610-9579dd8bc9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68798) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.495030] env[68798]: DEBUG oslo_concurrency.lockutils [req-fbf02e9a-e56c-4168-9d46-6bc731498313 req-59911a2c-c092-4418-a74c-5d58eaefe2da service nova] Releasing lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259" {{(pid=68798) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
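Throughout this section, access to shared state is serialized with named oslo.concurrency locks: the image cache (lock "[datastore1] devstack-image-cache_base/659c1b0c-65c8-46ab-93ff-5947bb8f4c70/659c1b0c-65c8-46ab-93ff-5947bb8f4c70.vmdk"), the resource tracker (lock "compute_resources"), and the network info cache (lock "refresh_cache-75d57c9c-5c4b-435d-9837-1f22a1904259"), which is what produces the Acquiring / acquired / "released" lines with their waited and held timings. Below is a minimal sketch of that pattern for the image cache, assuming lockutils.lock() as the context manager; the two datastore helpers are hypothetical placeholders, not Nova functions.

    from oslo_concurrency import lockutils

    def get_cached_image(session, image_id, cached_vmdk_path):
        # Concurrent spawns of the same image serialize on a lock named after
        # the cached VMDK path, so only one request fetches the image while
        # the others wait and then reuse the cached copy.
        with lockutils.lock(cached_vmdk_path):
            if not datastore_file_exists(session, cached_vmdk_path):  # hypothetical helper
                download_image_to_cache(session, image_id, cached_vmdk_path)  # hypothetical helper
        return cached_vmdk_path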